lang (stringclasses, 1 value) | license (stringclasses, 13 values) | stderr (stringlengths 0-350) | commit (stringlengths 40-40) | returncode (int64, 0-128) | repos (stringlengths 7-45.1k) | new_contents (stringlengths 0-1.87M) | new_file (stringlengths 6-292) | old_contents (stringlengths 0-1.87M) | message (stringlengths 6-9.26k) | old_file (stringlengths 6-292) | subject (stringlengths 0-4.45k)
---|---|---|---|---|---|---|---|---|---|---|---|
Java | mit | 7e9b8f719e19a6db9eb5bae30e4d958d26f65ee1 | 0 | SoftwareEngineeringToolDemos/FSE-2011-Crystal,SoftwareEngineeringToolDemos/FSE-2011-Crystal,KaranDagar/crystalvc,KaranDagar/crystalvc,SoftwareEngineeringToolDemos/FSE-2011-Crystal,brunyuriy/crystalvc,KaranDagar/FSE-2011-Crystal---old,KaranDagar/FSE-2011-Crystal---old,brunyuriy/crystalvc | package crystal.client;
import java.awt.BorderLayout;
import java.awt.Container;
import java.util.Collections;
import java.util.Comparator;
import java.util.Hashtable;
import java.util.List;
import java.util.Vector;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingWorker;
import crystal.model.ConflictResult;
import crystal.model.DataSource;
import crystal.model.ConflictResult.ResultStatus;
/**
* Conflict Client UI; displays the view showing the state of the repositories contained in the preferences.
*
* @author rtholmes
*
*/
public class ConflictClient implements IConflictClient {
/**
* This class enables the calculations to happen on a background thread but _STILL_ update the UI. When we were
* doing the analysis on a regular Thread the UI wouldn't update until all of the tasks were done; the UI didn't
* block, but it didn't update either. This fixes that problem.
*
* @author rtholmes
*/
class CalculateTask extends SwingWorker<Void, ConflictResult> {
ProjectPreferences _prefs;
DataSource _source;
/**
* Constructor.
*
* @param source
* @param prefs
*/
CalculateTask(DataSource source, ProjectPreferences prefs) {
_source = source;
_prefs = prefs;
}
@Override
protected Void doInBackground() throws Exception {
ConflictResult calculatingPlaceholder = new ConflictResult(_source, ResultStatus.PENDING);
publish(calculatingPlaceholder);
ConflictResult result = ConflictDaemon.calculateConflict(_source, _prefs);
publish(result);
return null;
}
@Override
protected void process(List<ConflictResult> chunks) {
for (ConflictResult cr : chunks) {
setStatus(cr);
}
}
}
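/*
* Illustrative usage sketch (added for clarity; not part of the original source, and the variable
* names below are placeholders): each data source gets its own CalculateTask, so doInBackground()
* runs off the event dispatch thread while process(..) applies the published results on it.
*
* CalculateTask task = new CalculateTask(someDataSource, someProjectPreferences);
* task.execute(); // publishes a PENDING placeholder first, then the computed ConflictResult
*/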
/**
* UI frame.
*/
private JFrame _frame = null;
/**
* Preference store used by the client.
*/
private ClientPreferences _preferences;
/**
* Stores the results of the analysis. This provides a simple decoupling between the DataSource and the
* ConflictResult.
*/
Hashtable<DataSource, ConflictResult> resultMap = new Hashtable<DataSource, ConflictResult>();
/**
* Runs the analysis on any projects described by the preferences.
*/
public void calculateConflicts() {
for (ProjectPreferences projPref : _preferences.getProjectPreference()) {
for (final DataSource source : projPref.getDataSources()) {
CalculateTask ct = new CalculateTask(source, projPref);
ct.execute();
}
}
}
/**
* Close the ConflictClient UI.
*/
public void close() {
_frame.setVisible(false);
}
/**
* Creates the UI and brings it to the foreground.
*
* @param prefs
* Preferences used to populate the UI with.
*/
public void createAndShowGUI(ClientPreferences prefs) {
_preferences = prefs;
// Create and set up the window.
_frame = new JFrame("Conflict Client");
// set all cells to pending on initial load
// NOTE: caching might be a good idea here in the future.
for (ProjectPreferences projPref : prefs.getProjectPreference()) {
for (DataSource source : projPref.getDataSources()) {
resultMap.put(source, new ConflictResult(source, ResultStatus.PENDING));
}
}
refresh();
}
/**
* Creates the HTML for the header row for a project.
*
* @param projectPreferences
* ProjectPreferences to consider.
* @param numColumns
* The maximum number of columns that should be displayed; enables HTML padding.
* @return the HTML for the project rows.
*/
private String createHeader(ProjectPreferences projectPreferences, int numColumns) {
String pre = "<tr>";
String rows = "";
rows += "<td><b></b></td>";
Vector<DataSource> sources = new Vector<DataSource>();
sources.addAll(projectPreferences.getDataSources());
Collections.sort(sources, new Comparator<DataSource>() {
@Override
public int compare(DataSource o1, DataSource o2) {
return o1.getShortName().compareTo(o2.getShortName());
}
});
for (DataSource source : sources) {
String rPre = "";
String rBody = "<td><b>" + source.getShortName() + "</b></td>";
String rPost = "";
rows += rPre + rBody + rPost;
}
String post = "";
if (numColumns > projectPreferences.getDataSources().size()) {
for (int i = 0; i < numColumns - projectPreferences.getDataSources().size(); i++) {
post += "<td></td>";
}
}
post += "</tr>";
return pre + rows + post;
}
/**
* Creates the HTML for content row for a project.
*
* @param prefs
* ProjectPreferences to consider.
* @param numColumns
* The maximum number of columns that should be displayed; enables HTML padding.
* @return the HTML for the project rows.
*/
private String createProjectRow(ProjectPreferences prefs, int numColumns) {
String pre = "<tr>";
String rows = "";
// my status
rows += "<td>" + prefs.getEnvironment().getShortName() + "</td>";
// sort the columns so they're stable in subsequent runs of the client
Vector<DataSource> sources = new Vector<DataSource>();
sources.addAll(prefs.getDataSources());
Collections.sort(sources, new Comparator<DataSource>() {
@Override
public int compare(DataSource o1, DataSource o2) {
return o1.getShortName().compareTo(o2.getShortName());
}
});
for (DataSource source : sources) {
String rPre = "";
String rBody = "";
if (resultMap.containsKey(source)) {
ResultStatus status = resultMap.get(source).getStatus();
String bgColour = "";
String icon = "";
String DEFAULT_BG = "grey";
if (status.equals(ResultStatus.SAME)) {
bgColour = DEFAULT_BG;//"white";
icon = "same.png";
} else if (status.equals(ResultStatus.AHEAD)) {
bgColour = DEFAULT_BG;//"yellow";
icon = "ahead.png";
} else if (status.equals(ResultStatus.BEHIND)) {
bgColour = DEFAULT_BG;//"#FFA500";
icon = "behind.png";
} else if (status.equals(ResultStatus.MERGECLEAN)) {
bgColour = DEFAULT_BG;//i dunno;
icon = "merge.png";
} else if (status.equals(ResultStatus.MERGECONFLICT)) {
bgColour = DEFAULT_BG;//"red";
icon = "mergeconflict.png";
} else if (status.equals(ResultStatus.PENDING)) {
bgColour = DEFAULT_BG;//"#CCCCFF";
icon = "clock.png";
}
String iconPrefix = "http://www.cs.washington.edu/homes/rtholmes/tmp/speculationImages/";
rBody = "<td align='center' bgcolor='" + bgColour + "'>" + "<img src='" + iconPrefix + icon + "'/>" + "</td>";
} else {
rBody = "<td align='center'>" + "n/a" + "</td>";
}
String rPost = "";
rows += rPre + rBody + rPost;
}
String post = "";
if (numColumns > sources.size()) {
for (int i = 0; i < numColumns - sources.size(); i++) {
post += "<td></td>";
}
}
post += "</tr>";
return pre + rows + post;
}
/**
* Creates the body of the ConflictClient UI. Right now this simply makes a HTML table and fires it into the space
* since that is a lot easier than dealing with Swing UI elements.
*
* @param prefs
* preferences used to create the body representation.
* @return HTML corresponding to the UI body.
*/
private String createText(ClientPreferences prefs) {
String pre = "<html>";
String post = "</html>";
String body = "";
int maxSources = 0;
for (ProjectPreferences pPref : prefs.getProjectPreference()) {
int numSources = pPref.getDataSources().size();
if (numSources > maxSources)
maxSources = numSources;
}
for (ProjectPreferences pPref : prefs.getProjectPreference()) {
String rowText = createHeader(pPref, maxSources) + createProjectRow(pPref, maxSources);
System.out.println("ConflictClient::createText(..) - row text: " + rowText);
body += rowText;
}
String retValue = pre + body + post;
System.out.println("ConflictClient::createText(..): " + retValue);
return retValue;
}
/**
* Refreshes the UI.
*/
private void refresh() {
_frame.getContentPane().removeAll();
Container contentPane = _frame.getContentPane();
JLabel content = new JLabel();
String labelText = createText(_preferences);
content.setText(labelText);
contentPane.add(content, BorderLayout.CENTER);
// Display the window.
_frame.pack();
_frame.setVisible(true);
}
@Override
public void setStatus(ConflictResult result) {
System.out.println("ConflictClient::setStatus( " + result + ")");
resultMap.put(result.getDataSource(), result);
refresh();
}
} | src/crystal/client/ConflictClient.java | package crystal.client;
import java.awt.BorderLayout;
import java.awt.Container;
import java.util.Collections;
import java.util.Comparator;
import java.util.Hashtable;
import java.util.List;
import java.util.Vector;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingWorker;
import crystal.model.ConflictResult;
import crystal.model.DataSource;
import crystal.model.ConflictResult.ResultStatus;
/**
* Conflict Client UI; displays the view showing the state of the repositories contained in the preferences.
*
* @author rtholmes
*
*/
public class ConflictClient implements IConflictClient {
/**
* This class enables the calculations to happen on a background thread but _STILL_ update the UI. When we were
* doing the analysis on a regular Thread the UI wouldn't update until all of the tasks were done; the UI didn't
* block, but it didn't update either. This fixes that problem.
*
* @author rtholmes
*/
class CalculateTask extends SwingWorker<Void, ConflictResult> {
ProjectPreferences _prefs;
DataSource _source;
/**
* Constructor.
*
* @param source
* @param prefs
*/
CalculateTask(DataSource source, ProjectPreferences prefs) {
_source = source;
_prefs = prefs;
}
@Override
protected Void doInBackground() throws Exception {
ConflictResult calculatingPlaceholder = new ConflictResult(_source, ResultStatus.PENDING);
publish(calculatingPlaceholder);
ConflictResult result = ConflictDaemon.calculateConflict(_source, _prefs);
publish(result);
return null;
}
@Override
protected void process(List<ConflictResult> chunks) {
for (ConflictResult cr : chunks) {
setStatus(cr);
}
}
}
/**
* UI frame.
*/
private JFrame _frame = null;
/**
* Preference store used by the client.
*/
private ClientPreferences _preferences;
/**
* Stores the results of the analysis. This provides a simple decoupling between the DataSource and the
* ConflictResult.
*/
Hashtable<DataSource, ConflictResult> resultMap = new Hashtable<DataSource, ConflictResult>();
/**
* Runs the analysis on any projects described by the preferences.
*/
public void calculateConflicts() {
for (ProjectPreferences projPref : _preferences.getProjectPreference()) {
for (final DataSource source : projPref.getDataSources()) {
CalculateTask ct = new CalculateTask(source, projPref);
ct.execute();
}
}
}
/**
* Close the ConflictClient UI.
*/
public void close() {
_frame.setVisible(false);
}
/**
* Creates the UI and brings it to the foreground.
*
* @param prefs
* Preferences used to populate the UI with.
*/
public void createAndShowGUI(ClientPreferences prefs) {
_preferences = prefs;
// Create and set up the window.
_frame = new JFrame("Conflict Client");
// set all cells to pending on initial load
// NOTE: caching might be a good idea here in the future.
for (ProjectPreferences projPref : prefs.getProjectPreference()) {
for (DataSource source : projPref.getDataSources()) {
resultMap.put(source, new ConflictResult(source, ResultStatus.PENDING));
}
}
refresh();
}
/**
* Creates the HTML for the header row for a project.
*
* @param projectPreferences
* ProjectPreferences to consider.
* @param numColumns
* The maximum number of columns that should be displayed; enables HTML padding.
* @return the HTML for the project rows.
*/
private String createHeader(ProjectPreferences projectPreferences, int numColumns) {
String pre = "<tr>";
String rows = "";
rows += "<td><b></b></td>";
Vector<DataSource> sources = new Vector<DataSource>();
sources.addAll(projectPreferences.getDataSources());
Collections.sort(sources, new Comparator<DataSource>() {
@Override
public int compare(DataSource o1, DataSource o2) {
return o1.getShortName().compareTo(o2.getShortName());
}
});
for (DataSource source : sources) {
String rPre = "";
String rBody = "<td><b>" + source.getShortName() + "</b></td>";
String rPost = "";
rows += rPre + rBody + rPost;
}
String post = "";
if (numColumns > projectPreferences.getDataSources().size()) {
for (int i = 0; i < numColumns - projectPreferences.getDataSources().size(); i++) {
post += "<td></td>";
}
}
post += "</tr>";
return pre + rows + post;
}
/**
* Creates the HTML for content row for a project.
*
* @param prefs
* ProjectPreferences to consider.
* @param numColumns
* The maximum number of columns that should be displayed; enables HTML padding.
* @return the HTML for the project rows.
*/
private String createProjectRow(ProjectPreferences prefs, int numColumns) {
String pre = "<tr>";
String rows = "";
// my status
rows += "<td>" + prefs.getEnvironment().getShortName() + "</td>";
// sort the columns so they're stable in subsequent runs of the client
Vector<DataSource> sources = new Vector<DataSource>();
sources.addAll(prefs.getDataSources());
Collections.sort(sources, new Comparator<DataSource>() {
@Override
public int compare(DataSource o1, DataSource o2) {
return o1.getShortName().compareTo(o2.getShortName());
}
});
for (DataSource source : sources) {
String rPre = "";
String rBody = "";
if (resultMap.containsKey(source)) {
ResultStatus status = resultMap.get(source).getStatus();
String bgColour = "";
String icon = "";
String DEFAULT_BG = "grey";
if (status.equals(ResultStatus.SAME)) {
bgColour = DEFAULT_BG;//"white";
icon = "same.png";
} else if (status.equals(ResultStatus.AHEAD)) {
bgColour = DEFAULT_BG;//"yellow";
icon = "ahead.png";
} else if (status.equals(ResultStatus.BEHIND)) {
bgColour = DEFAULT_BG;//"#FFA500";
icon = "behind.png";
} else if (status.equals(ResultStatus.MERGECLEAN)) {
bgColour = DEFAULT_BG;//i dunno;
icon = "merge.png";
} else if (status.equals(ResultStatus.MERGECONFLICT)) {
bgColour = DEFAULT_BG;//"red";
icon = "mergeconflict.png";
} else if (status.equals(ResultStatus.PENDING)) {
bgColour = DEFAULT_BG;//"#CCCCFF";
icon = "clock.png";
}
String iconPrefix = "http://www.cs.washington.edu/homes/rtholmes/tmp/speculationImages/";
rBody = "<td align='center' bgcolor='" + bgColour + "'>" + "<img src='" + iconPrefix + icon + "' height='32px'/>" + "</td>";
} else {
rBody = "<td align='center'>" + "n/a" + "</td>";
}
String rPost = "";
rows += rPre + rBody + rPost;
}
String post = "";
if (numColumns > sources.size()) {
for (int i = 0; i < numColumns - sources.size(); i++) {
post += "<td></td>";
}
}
post += "</tr>";
return pre + rows + post;
}
/**
* Creates the body of the ConflictClient UI. Right now this simply makes a HTML table and fires it into the space
* since that is a lot easier than dealing with Swing UI elements.
*
* @param prefs
* preferences used to create the body representation.
* @return HTML corresponding to the UI body.
*/
private String createText(ClientPreferences prefs) {
String pre = "<html>";
String post = "</html>";
String body = "";
int maxSources = 0;
for (ProjectPreferences pPref : prefs.getProjectPreference()) {
int numSources = pPref.getDataSources().size();
if (numSources > maxSources)
maxSources = numSources;
}
for (ProjectPreferences pPref : prefs.getProjectPreference()) {
String rowText = createHeader(pPref, maxSources) + createProjectRow(pPref, maxSources);
System.out.println("ConflictClient::createText(..) - row text: " + rowText);
body += rowText;
}
String retValue = pre + body + post;
System.out.println("ConflictClient::createText(..): " + retValue);
return retValue;
}
/**
* Refreshes the UI.
*/
private void refresh() {
_frame.getContentPane().removeAll();
Container contentPane = _frame.getContentPane();
JLabel content = new JLabel();
String labelText = createText(_preferences);
content.setText(labelText);
contentPane.add(content, BorderLayout.CENTER);
// Display the window.
_frame.pack();
_frame.setVisible(true);
}
@Override
public void setStatus(ConflictResult result) {
System.out.println("ConflictClient::setStatus( " + result + ")");
resultMap.put(result.getDataSource(), result);
refresh();
}
} | alt tag doesn't work on img tags after all
| src/crystal/client/ConflictClient.java | alt tag doesn't work on img tags after all |
|
Java | mit | 3066241f2cdc73181d86f161a06bd152be65fd32 | 0 | StevenThuriot/MOP | package model;
import java.util.GregorianCalendar;
import java.util.List;
public class SuccessfulTaskState extends TaskState {
protected SuccessfulTaskState(Task context) {
super(context);
}
/**
* Returns whether a task can be executed right now.
* This is true when all its dependencies are (successfully) finished and
* all of its required resources are available.
*/
@Override
protected Boolean canBeExecuted(){
boolean resourceReady = true;
GregorianCalendar now = new GregorianCalendar();
for(Resource r: this.getContext().getRequiredResources()){
resourceReady = resourceReady && (r.availableAt(now, this.getContext().getDuration()));
}
return resourceReady;
}
/**
* Returns a boolean indicating whether the current task can be finished.
* A task can not be finished when it is failed or any of its dependencies is failed.
*/
@Override
protected boolean canBeFinished() {
return true;
}
/**
* Returns whether a task is performed or not.
* @return
*/
@Override
protected Boolean isPerformed()
{
return true;
}
/**
* Returns whether a task is successful or not.
* @return
*/
@Override
protected Boolean isSuccesful()
{
return true;
}
/**
* Returns whether the current task satisfies the business rule 2.
* @return Boolean
*/
protected Boolean satisfiesBusinessRule2()
{
List<Task> list = this.getContext().getTaskDependencyManager().getDependencies();
boolean failed = false;
boolean unfinished = false;
for (Task task : list) {
if (task.isFailed()) {
failed = true;
break;
}
if (task.isUnfinished()) {
unfinished = true;
break;
}
}
if (failed || unfinished) {
return false;
}
return true;
}
/**
* Returns whether the current task satisfies the business rule 3.
* @return Boolean
*/
protected Boolean satisfiesBusinessRule3()
{
GregorianCalendar currentTime = new GregorianCalendar();
GregorianCalendar startTime = this.getContext().getStartDate();
//Not before start time
if ( !currentTime.before(startTime) ) {
return true;
}
return false;
}
@Override
public String toString() {
return "Successful";
}
}
| Java/src/model/SuccessfulTaskState.java | package model;
import java.util.GregorianCalendar;
import java.util.List;
public class SuccessfulTaskState extends TaskState {
protected SuccessfulTaskState(Task context) {
super(context);
}
/**
* Returns whether a task can be executed right now.
* This is true when all its dependencies are (successfully) finished and
* all of its required resources are available.
*/
@Override
protected Boolean canBeExecuted(){
boolean resourceReady = true;
GregorianCalendar now = new GregorianCalendar();
for(Resource r: this.getContext().getRequiredResources()){
resourceReady = resourceReady && (r.availableAt(now, this.getContext().getDuration()));
}
return resourceReady;
}
/**
* Returns a boolean indicating whether the current task can be finished.
* A task can not be finished when it is failed or any of its dependencies is failed.
*/
@Override
protected boolean canBeFinished() {
return true;
}
/**
* Returns whether a task is performed or not.
* @return
*/
@Override
protected Boolean isPerformed()
{
return true;
}
/**
* Returns whether a task is successful or not.
* @return
*/
@Override
protected Boolean isSuccesful()
{
return true;
}
/**
* Returns whether the current task satisfies the business rule 2.
* @return Boolean
*/
protected Boolean satisfiesBusinessRule2()
{
List<Task> list = this.getContext().getTaskDependencyManager().getDependencies();
boolean failed = false;
boolean unfinished = false;
for (Task task : list) {
if (task.isFailed()) {
failed = true;
break;
}
if (task.isUnfinished()) {
unfinished = true;
break;
}
}
if (failed || unfinished) {
return false;
}
return true;
}
/**
* Returns whether the current task satisfies the business rule 3.
* @return Boolean
*/
protected Boolean satisfiesBusinessRule3()
{
GregorianCalendar currentTime = new GregorianCalendar();
GregorianCalendar startTime = this.getContext().getStartDate();
GregorianCalendar dueTime = this.getContext().getDueDate();
boolean answer = false;
//Not before start time
if ( !currentTime.before(startTime) ) {
//Rule succeeds, continue to next check
answer = true;
}
//Not after or at the duetime
if ( !( answer && !currentTime.before(dueTime) )) {
//Rule fails
answer = false;
}
return answer;
}
@Override
public String toString() {
return "Successful";
}
}
| Corrected BR3 in the Successful state | Java/src/model/SuccessfulTaskState.java | Corrected BR3 in the Successful state |
|
Java | mit | b22889a002322575aa4c16491dce00f7b6bc791f | 0 | Permafrost/TundraTN.java,Permafrost/TundraTN.java | /*
* The MIT License (MIT)
*
* Copyright (c) 2015 Lachlan Dowding
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package permafrost.tundra.tn.delivery;
import com.wm.app.b2b.server.InvokeState;
import com.wm.app.b2b.server.ServerAPI;
import com.wm.app.b2b.server.Service;
import com.wm.app.b2b.server.ServiceException;
import com.wm.app.b2b.server.Session;
import com.wm.app.b2b.server.scheduler.ScheduledTask;
import com.wm.app.tn.db.Datastore;
import com.wm.app.tn.db.QueueOperations;
import com.wm.app.tn.db.SQLWrappers;
import com.wm.app.tn.delivery.DeliveryQueue;
import com.wm.app.tn.delivery.DeliverySchedule;
import com.wm.app.tn.delivery.GuaranteedJob;
import com.wm.app.tn.doc.BizDocEnvelope;
import com.wm.data.IData;
import com.wm.data.IDataCursor;
import com.wm.data.IDataFactory;
import com.wm.data.IDataUtil;
import com.wm.lang.ns.NSName;
import com.wm.util.Masks;
import permafrost.tundra.data.IDataHelper;
import permafrost.tundra.lang.BooleanHelper;
import permafrost.tundra.lang.ExceptionHelper;
import permafrost.tundra.lang.IdentityHelper;
import permafrost.tundra.lang.StringHelper;
import permafrost.tundra.lang.ThreadHelper;
import permafrost.tundra.server.BlockingServerThreadPoolExecutor;
import permafrost.tundra.time.DateTimeHelper;
import permafrost.tundra.tn.document.BizDocEnvelopeHelper;
import permafrost.tundra.tn.profile.ProfileCache;
import permafrost.tundra.util.concurrent.DirectExecutorService;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.MessageFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
/**
* A collection of convenience methods for working with Trading Networks delivery queues.
*/
public final class DeliveryQueueHelper {
/**
* SQL statement to select head of a delivery queue in job creation datetime order.
*/
private static final String SELECT_NEXT_DELIVERY_JOB_ORDERED_SQL = "SELECT JobID FROM DeliveryJob WHERE QueueName = ? AND JobStatus = 'QUEUED' AND TimeCreated = (SELECT MIN(TimeCreated) FROM DeliveryJob WHERE QueueName = ? AND JobStatus = 'QUEUED') AND TimeUpdated <= ?";
/**
* SQL statement to select head of a delivery queue in indeterminate order.
*/
private static final String SELECT_NEXT_DELIVERY_JOB_UNORDERED_SQL = "SELECT JobID FROM DeliveryJob WHERE QueueName = ? AND JobStatus = 'QUEUED' AND TimeCreated = (SELECT MIN(TimeCreated) FROM DeliveryJob WHERE QueueName = ? AND JobStatus = 'QUEUED' AND TimeUpdated <= ?)";
/**
* The age a delivery job must be before it is eligible to be processed.
*/
private static final long DELIVERY_JOB_AGE_THRESHOLD_MILLISECONDS = 750L;
/**
* The name of the service that Trading Networks uses to invoke delivery queue processing services.
*/
private static final String DELIVER_BATCH_SERVICE_NAME = "wm.tn.queuing:deliverBatch";
/**
* The name of the service used to update the completion status of a delivery queue job.
*/
private static final NSName UPDATE_QUEUED_TASK_SERVICE_NAME = NSName.create("wm.tn.queuing:updateQueuedTask");
/**
* The name of the service used to update a delivery queue.
*/
private static final NSName UPDATE_QUEUE_SERVICE_NAME = NSName.create("wm.tn.queuing:updateQueue");
/**
* The minimum wait between each poll of a delivery queue for more jobs.
*/
private static final long MIN_WAIT_BETWEEN_DELIVERY_QUEUE_POLLS_MILLISECONDS = 1L;
/**
* The wait between each refresh of a delivery queue settings from the database.
*/
private static final long WAIT_BETWEEN_DELIVERY_QUEUE_REFRESH_MILLISECONDS = 5L * 1000L;
/**
* The suffix used on worker thread names.
*/
private static final String WORKER_THREAD_SUFFIX = ": Worker";
/**
* The suffix used on supervisor thread names.
*/
private static final String SUPERVISOR_THREAD_SUFFIX = ": Supervisor";
/**
* The bizdoc user status to use when a job is dequeued.
*/
private static final String DEQUEUED_USER_STATUS = "DEQUEUED";
/**
* Disallow instantiation of this class.
*/
private DeliveryQueueHelper() {}
/**
* Returns the Trading Networks delivery queue associated with the given name.
*
* @param queueName The name of the queue to return.
* @return The delivery queue with the given name.
* @throws ServiceException If a database error occurs.
*/
public static DeliveryQueue get(String queueName) throws ServiceException {
if (queueName == null) return null;
DeliveryQueue queue = null;
try {
queue = QueueOperations.selectByName(queueName);
} catch(SQLException ex) {
ExceptionHelper.raise(ex);
} catch(IOException ex) {
ExceptionHelper.raise(ex);
}
return queue;
}
/**
* Refreshes the given Trading Networks delivery queue from the database.
*
* @param queue The queue to be refreshed.
* @return The given queue, refreshed from the database.
* @throws ServiceException If a database error occurs.
*/
public static DeliveryQueue refresh(DeliveryQueue queue) throws ServiceException {
return get(queue.getQueueName());
}
/**
* Returns a list of all registered Trading Networks delivery queues.
*
* @return A list of all registered Trading Networks delivery queues.
* @throws ServiceException If a database error occurs.
*/
public static DeliveryQueue[] list() throws ServiceException {
DeliveryQueue[] output = null;
try {
output = QueueOperations.select(null);
} catch(SQLException ex) {
ExceptionHelper.raise(ex);
} catch(IOException ex) {
ExceptionHelper.raise(ex);
}
return output;
}
/**
* Enables the delivery of the given Trading Networks delivery queue.
*
* @param queue The queue to enable delivery on.
* @throws ServiceException If a database error occurs.
*/
public static void enable(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
queue.setState(DeliveryQueue.STATE_ENABLED);
save(queue);
}
/**
* Disables the delivery of the given Trading Networks delivery queue.
*
* @param queue The queue to enable delivery on.
* @throws ServiceException If a database error occurs.
*/
public static void disable(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
queue.setState(DeliveryQueue.STATE_DISABLED);
save(queue);
}
/**
* Drains the delivery of the given Trading Networks delivery queue.
*
* @param queue The queue to enable delivery on.
* @throws ServiceException If a database error occurs.
*/
public static void drain(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
queue.setState(DeliveryQueue.STATE_DRAINING);
save(queue);
}
/**
* Suspends the delivery of the given Trading Networks delivery queue.
*
* @param queue The queue to enable delivery on.
* @throws ServiceException If a database error occurs.
*/
public static void suspend(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
queue.setState(DeliveryQueue.STATE_SUSPENDED);
save(queue);
}
/**
* Returns the number of jobs currently queued in the given Trading Networks delivery queue.
*
* @param queue The queue to return the length of.
* @return The length of the given queue, which is the number of delivery jobs with a status
* of QUEUED or DELIVERING.
* @throws ServiceException If a database error occurs.
*/
public static int length(DeliveryQueue queue) throws ServiceException {
int length = 0;
if (queue != null) {
try {
String[] jobs = QueueOperations.getQueuedJobs(queue.getQueueName());
if (jobs != null) length = jobs.length;
} catch(SQLException ex) {
ExceptionHelper.raise(ex);
}
}
return length;
}
/**
* Updates the given Trading Networks delivery queue with any changes that may have occurred.
*
* @param queue The queue whose changes are to be saved.
* @throws ServiceException If a database error occurs.
*/
public static void save(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
try {
IData pipeline = IDataFactory.create();
IDataCursor cursor = pipeline.getCursor();
IDataUtil.put(cursor, "queue", queue);
cursor.destroy();
Service.doInvoke(UPDATE_QUEUE_SERVICE_NAME, pipeline);
} catch(Exception ex) {
ExceptionHelper.raise(ex);
}
}
/**
* Returns the head of the given delivery queue without dequeuing it.
*
* @param queue The delivery queue whose head job is to be returned.
* @param ordered Whether jobs should be dequeued in strict creation datetime first in first out (FIFO) order.
* @return The job at the head of the given queue, or null if the queue is empty.
* @throws ServiceException If a database error occurs.
*/
public static GuaranteedJob peek(DeliveryQueue queue, boolean ordered) throws ServiceException {
if (queue == null) return null;
Connection connection = null;
PreparedStatement statement = null;
ResultSet results = null;
GuaranteedJob job = null;
try {
connection = Datastore.getConnection();
statement = connection.prepareStatement(ordered ? SELECT_NEXT_DELIVERY_JOB_ORDERED_SQL : SELECT_NEXT_DELIVERY_JOB_UNORDERED_SQL);
statement.clearParameters();
String queueName = queue.getQueueName();
SQLWrappers.setChoppedString(statement, 1, queueName, "DeliveryQueue.QueueName");
SQLWrappers.setChoppedString(statement, 2, queueName, "DeliveryQueue.QueueName");
SQLWrappers.setTimestamp(statement, 3, new Timestamp(System.currentTimeMillis() - DELIVERY_JOB_AGE_THRESHOLD_MILLISECONDS));
results = statement.executeQuery();
if (results.next()) {
job = GuaranteedJobHelper.get(results.getString(1));
}
connection.commit();
} catch (SQLException ex) {
connection = Datastore.handleSQLException(connection, ex);
ExceptionHelper.raise(ex);
} finally {
SQLWrappers.close(results);
SQLWrappers.close(statement);
Datastore.releaseConnection(connection);
}
return job;
}
/**
* Dequeues the job at the head of the given delivery queue.
*
* @param queue The delivery queue to dequeue the head job from.
* @param ordered Whether jobs should be dequeued in strict creation datetime first in first out (FIFO) order.
* @return The dequeued job that was at the head of the given queue, or null if queue is empty.
* @throws ServiceException If a database error occurs.
*/
public static GuaranteedJob pop(DeliveryQueue queue, boolean ordered) throws ServiceException {
GuaranteedJob job = peek(queue, ordered);
GuaranteedJobHelper.setDelivering(job);
return job;
}
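/*
* Illustrative usage sketch (added for clarity; not part of the original source, and the queue name
* is a placeholder):
*
* DeliveryQueue queue = DeliveryQueueHelper.get("SomeQueue"); // look up a queue by name
* GuaranteedJob head = DeliveryQueueHelper.peek(queue, true); // inspect the head job without dequeuing it
* GuaranteedJob next = DeliveryQueueHelper.pop(queue, true); // dequeue the head job and mark it DELIVERING
*/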
/**
* Callable for invoking a given service against a given job.
*/
private static class CallableGuaranteedJob implements Callable<IData> {
/**
* The job against which the service will be invoked.
*/
private GuaranteedJob job;
/**
* The delivery queue from which the job was dequeued.
*/
private DeliveryQueue queue;
/**
* The service to be invoked.
*/
private NSName service;
/**
* The pipeline the service is invoked with.
*/
private IData pipeline;
/**
* The session the service is invoked under.
*/
private Session session;
/**
* The retry settings to be used when retrying the job.
*/
private int retryLimit, retryFactor, timeToWait;
/**
* Whether the deliver queue should be suspended on retry exhaustion.
*/
private boolean suspend;
/**
* Whether the owning bizdoc's status should be changed to reflect job success/failure.
*/
private boolean statusSilence;
/**
* The time the job was dequeued.
*/
private long timeDequeued;
/**
* Creates a new CallableGuaranteedJob which when called invokes the given service against the given job.
*
* @param job The job to be processed.
* @param service The service to be invoked to process the given job.
* @param session The session used when invoking the given service.
* @param pipeline The input pipeline used when invoking the given service.
* @param retryLimit The number of retries this job should attempt.
* @param retryFactor The factor used to extend the time to wait on each retry.
* @param timeToWait The time in seconds to wait between each retry.
* @param suspend Whether to suspend the delivery queue on job retry exhaustion.
*/
public CallableGuaranteedJob(DeliveryQueue queue, GuaranteedJob job, String service, Session session, IData pipeline, int retryLimit, int retryFactor, int timeToWait, boolean suspend) {
this(queue, job, service == null ? null : NSName.create(service), session, pipeline, retryLimit, retryFactor, timeToWait, suspend);
}
/**
* Creates a new CallableGuaranteedJob which when called invokes the given service against the given job.
*
* @param job The job to be processed.
* @param service The service to be invoked to process the given job.
* @param session The session used when invoking the given service.
* @param pipeline The input pipeline used when invoking the given service.
* @param retryLimit The number of retries this job should attempt.
* @param retryFactor The factor used to extend the time to wait on each retry.
* @param timeToWait The time in seconds to wait between each retry.
* @param suspend Whether to suspend the delivery queue on job retry exhaustion.
*/
public CallableGuaranteedJob(DeliveryQueue queue, GuaranteedJob job, NSName service, Session session, IData pipeline, int retryLimit, int retryFactor, int timeToWait, boolean suspend) {
if (queue == null) throw new NullPointerException("queue must not be null");
if (job == null) throw new NullPointerException("job must not be null");
if (service == null) throw new NullPointerException("service must not be null");
this.queue = queue;
this.job = job;
this.service = service;
this.session = session;
this.pipeline = pipeline == null ? IDataFactory.create() : IDataHelper.duplicate(pipeline);
this.retryLimit = retryLimit;
this.retryFactor = retryFactor;
this.timeToWait = timeToWait;
this.suspend = suspend;
this.statusSilence = getStatusSilence(queue);
}
/**
* Invokes the provided service with the provided pipeline and session against the job.
*
* @return The output pipeline returned by the invocation.
* @throws Exception If the service encounters an error.
*/
public IData call() throws Exception {
IData output = null;
Thread owningThread = Thread.currentThread();
String owningThreadPrefix = owningThread.getName();
try {
timeDequeued = System.currentTimeMillis();
BizDocEnvelope bizdoc = job.getBizDocEnvelope();
owningThread.setName(MessageFormat.format("{0}: Task={1} Time={2} STARTED", owningThreadPrefix, job.getJobId(), DateTimeHelper.now("datetime")));
if (bizdoc != null) {
BizDocEnvelopeHelper.setStatus(job.getBizDocEnvelope(), null, DEQUEUED_USER_STATUS, statusSilence);
}
GuaranteedJobHelper.log(job, "MESSAGE", "Processing", MessageFormat.format("Dequeued from {0} queue \"{1}\"", queue.getQueueType(), queue.getQueueName()), MessageFormat.format("Service \"{0}\" attempting to process document", service.getFullName()));
IDataCursor cursor = pipeline.getCursor();
IDataUtil.put(cursor, "$task", job);
if (bizdoc != null) {
bizdoc = BizDocEnvelopeHelper.get(bizdoc.getInternalId(), true);
IDataUtil.put(cursor, "bizdoc", bizdoc);
IDataUtil.put(cursor, "sender", ProfileCache.getInstance().get(bizdoc.getSenderId()));
IDataUtil.put(cursor, "receiver", ProfileCache.getInstance().get(bizdoc.getReceiverId()));
}
cursor.destroy();
output = Service.doInvoke(service, session, pipeline);
owningThread.setName(MessageFormat.format("{0}: Task={1} Time={2} COMPLETED", owningThreadPrefix, job.getJobId(), DateTimeHelper.now("datetime")));
setJobCompleted(output);
} catch(Exception ex) {
owningThread.setName(MessageFormat.format("{0}: Task={1} Time={2} FAILED: {3}", owningThreadPrefix, job.getJobId(), DateTimeHelper.now("datetime"), ExceptionHelper.getMessage(ex)));
setJobCompleted(output, ex);
throw ex;
} finally {
owningThread.setName(owningThreadPrefix);
}
return output;
}
/**
* Sets the job as successfully completed.
*
* @param serviceOutput The output of the service used to process the job.
* @throws Exception If a database error occurs.
*/
private void setJobCompleted(IData serviceOutput) throws Exception {
setJobCompleted(serviceOutput, null);
}
/**
* Sets the job as either successfully or unsuccessfully completed, depending on whether
* an exception is provided.
*
* @param serviceOutput The output of the service used to process the job.
* @param exception Optional exception encountered while processing the job.
* @throws Exception If a database error occurs.
*/
private void setJobCompleted(IData serviceOutput, Throwable exception) throws Exception {
IData input = IDataFactory.create();
IDataCursor cursor = input.getCursor();
IDataUtil.put(cursor, "taskid", job.getJobId());
IDataUtil.put(cursor, "queue", queue.getQueueName());
if (exception == null) {
IDataUtil.put(cursor, "status", "success");
} else {
IDataUtil.put(cursor, "status", "fail");
IDataUtil.put(cursor, "statusMsg", ExceptionHelper.getMessage(exception));
if (retryLimit > 0 && GuaranteedJobHelper.hasUnrecoverableErrors(job)) {
// abort the delivery job so it won't be retried
GuaranteedJobHelper.setRetryStrategy(job, 0, 1, 0);
GuaranteedJobHelper.log(job, "ERROR", "Delivery", "Delivery aborted", MessageFormat.format("Delivery task \"{0}\" on {1} queue \"{2}\" was aborted due to unrecoverable errors being encountered, and will not be retried", job.getJobId(), queue.getQueueType(), queue.getQueueName()));
} else {
GuaranteedJobHelper.setRetryStrategy(job, retryLimit, retryFactor, timeToWait);
}
}
IDataUtil.put(cursor, "timeDequeued", timeDequeued);
if (serviceOutput != null) IDataUtil.put(cursor, "serviceOutput", serviceOutput);
cursor.destroy();
Service.doInvoke(UPDATE_QUEUED_TASK_SERVICE_NAME, session, input);
GuaranteedJobHelper.retry(job, suspend);
}
}
/**
* Dequeues each task on the given Trading Networks delivery queue, and processes the task using the given service
* and input pipeline; if concurrency > 1, tasks will be processed by a thread pool whose size is equal to the
* desired concurrency, otherwise they will be processed on the current thread.
*
* @param queueName The name of the delivery queue whose queued jobs are to be processed.
* @param service The service to be invoked to process jobs on the given delivery queue.
* @param pipeline The input pipeline used when invoking the given service.
* @param concurrency If > 1, this is the number of threads used to process jobs simultaneously.
* @param retryLimit The number of retries this job should attempt.
* @param retryFactor The factor used to extend the time to wait on each retry.
* @param timeToWait The time in seconds to wait between each retry.
* @param threadPriority The thread priority used when processing tasks.
* @param daemonize If true, all threads will be marked as daemons and execution will not end until the JVM
* shuts down or the TN queue is disabled/suspended.
* @param ordered Whether delivery queue jobs should be processed in job creation datetime order.
* @param suspend Whether to suspend the delivery queue on job retry exhaustion.
* @throws ServiceException If an error is encountered while processing jobs.
*/
public static void each(String queueName, String service, IData pipeline, int concurrency, int retryLimit, int retryFactor, int timeToWait, int threadPriority, boolean daemonize, boolean ordered, boolean suspend) throws ServiceException {
if (queueName == null) throw new NullPointerException("queueName must not be null");
if (service == null) throw new NullPointerException("service must not be null");
DeliveryQueue queue = DeliveryQueueHelper.get(queueName);
if (queue == null) throw new ServiceException("Queue '" + queueName + "' does not exist");
each(queue, NSName.create(service), pipeline, concurrency, retryLimit, retryFactor, timeToWait, threadPriority, daemonize, ordered, suspend);
}
/**
* Dequeues each task on the given Trading Networks delivery queue, and processes the task using the given service
* and input pipeline; if concurrency > 1, tasks will be processed by a thread pool whose size is equal to the
* desired concurrency, otherwise they will be processed on the current thread.
*
* @param queue The delivery queue whose queued jobs are to be processed.
* @param service The service to be invoked to process jobs on the given delivery queue.
* @param pipeline The input pipeline used when invoking the given service.
* @param concurrency If > 1, this is the number of threads used to process jobs simultaneously.
* @param retryLimit The number of retries this job should attempt.
* @param retryFactor The factor used to extend the time to wait on each retry.
* @param timeToWait The time in seconds to wait between each retry.
* @param threadPriority The thread priority used when processing tasks.
* @param daemonize If true, all threads will be marked as daemons and execution will not end until the JVM
* shuts down or the TN queue is disabled/suspended.
* @param ordered Whether delivery queue jobs should be processed in job creation datetime order.
* @param suspend Whether to suspend the delivery queue on job retry exhaustion.
* @throws ServiceException If an error is encountered while processing jobs.
*/
public static void each(DeliveryQueue queue, NSName service, IData pipeline, int concurrency, int retryLimit, int retryFactor, int timeToWait, int threadPriority, boolean daemonize, boolean ordered, boolean suspend) throws ServiceException {
// normalize concurrency
if (concurrency <= 0) concurrency = 1;
String parentContext = IdentityHelper.generate();
// set owning thread priority and name
String previousThreadName = Thread.currentThread().getName();
int previousThreadPriority = Thread.currentThread().getPriority();
Thread.currentThread().setPriority(ThreadHelper.normalizePriority(threadPriority));
String threadName = getThreadPrefix(queue, parentContext);
if (concurrency > 1) {
threadName = threadName + SUPERVISOR_THREAD_SUFFIX;
} else {
threadName = threadName + WORKER_THREAD_SUFFIX;
}
Thread.currentThread().setName(threadName);
boolean invokedByTradingNetworks = invokedByTradingNetworks();
boolean queueEnabled = queue.isEnabled() || queue.isDraining();
Session session = Service.getSession();
ExecutorService executor = getExecutor(queue, concurrency, threadPriority, daemonize, InvokeState.getCurrentState(), parentContext);
long nextDeliveryQueueRefreshTime = System.currentTimeMillis() + WAIT_BETWEEN_DELIVERY_QUEUE_REFRESH_MILLISECONDS, sleepDuration = 0L;
try {
// while not interrupted and (not invoked by TN or queue is enabled): process queued jobs
while (!Thread.interrupted() && (!invokedByTradingNetworks || queueEnabled)) {
try {
if (sleepDuration > 0L) Thread.sleep(sleepDuration);
// set default sleep duration for when there are no pending jobs in queue or all threads are busy
sleepDuration = MIN_WAIT_BETWEEN_DELIVERY_QUEUE_POLLS_MILLISECONDS;
int activeCount = 0;
if (executor instanceof ThreadPoolExecutor) {
activeCount = ((ThreadPoolExecutor)executor).getActiveCount();
}
if (activeCount < concurrency) {
GuaranteedJob job = DeliveryQueueHelper.pop(queue, ordered);
if (job != null) {
// submit the job to the executor to be processed
executor.submit(new CallableGuaranteedJob(queue, job, service, session, pipeline, retryLimit, retryFactor, timeToWait, suspend));
sleepDuration = 0L; // poll for another job immediately, because the assumption is if there was one pending job then there is probably more
} else if (activeCount == 0) {
// no pending jobs, and thread pool is idle
if (daemonize) {
// calculate the next run time based on TN queue schedule so that we can sleep until that time
sleepDuration = untilNextRun(queue);
if (sleepDuration == 0L) {
// either the TN queue schedule was scheduled to run once or it has now expired, so exit
break;
}
} else {
// if not daemon and all threads have finished and there are no more jobs, then exit
break;
}
}
}
// refresh the delivery queue settings from the database, in case they have changed
if (invokedByTradingNetworks && System.currentTimeMillis() >= nextDeliveryQueueRefreshTime) {
queue = DeliveryQueueHelper.refresh(queue);
queueEnabled = queue.isEnabled() || queue.isDraining();
nextDeliveryQueueRefreshTime = System.currentTimeMillis() + WAIT_BETWEEN_DELIVERY_QUEUE_REFRESH_MILLISECONDS;
}
} catch(ServiceException ex) {
// assume exception is recoverable, log it and then continue
ServerAPI.logError(ex);
} catch(InterruptedException ex) {
// exit if thread is interrupted
break;
}
}
} catch(Throwable ex) {
ExceptionHelper.raise(ex);
} finally {
// restore owning thread priority and name
Thread.currentThread().setPriority(previousThreadPriority);
Thread.currentThread().setName(previousThreadName);
executor.shutdown();
}
}
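/*
* Illustrative invocation sketch (added for clarity; not part of the original source, and the queue
* and service names are placeholders): process queued jobs with 4 worker threads, retrying each
* failed job up to 3 times with a 60 second wait and a retry factor of 1, in creation datetime
* order, without daemonizing and without suspending the queue on retry exhaustion.
*
* DeliveryQueueHelper.each("SomeQueue", "some.package:processJob", IDataFactory.create(),
* 4, 3, 1, 60, Thread.NORM_PRIORITY, false, true, false);
*/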
/**
* Returns an executor appropriate for the level of desired concurrency.
*
* @param queue The delivery queue to be processed.
* @param concurrency The level of desired concurrency.
* @param threadPriority The thread priority to be used by the returned executor.
* @param threadDaemon Whether the created threads should be daemons.
* @param invokeState The invoke state to be used by the thread pool.
* @param parentContext A unique parent context ID to be included in a thread name for diagnostics.
* @return An executor appropriate for the level of desired concurrency.
*/
private static ExecutorService getExecutor(DeliveryQueue queue, int concurrency, int threadPriority, boolean threadDaemon, InvokeState invokeState, String parentContext) {
ExecutorService executor;
if (concurrency <= 1) {
executor = new DirectExecutorService();
} else {
executor = new BlockingServerThreadPoolExecutor(concurrency, getThreadPrefix(queue, parentContext) + WORKER_THREAD_SUFFIX, null, threadPriority, threadDaemon, invokeState);
((BlockingServerThreadPoolExecutor)executor).allowCoreThreadTimeOut(true);
}
return executor;
}
/**
* Returns the thread name prefix to be used for this delivery queue.
*
* @param queue The queue which will be processed by threads with the returned prefix.
* @param parentContext A unique parent context ID to be included in a thread name for diagnostics.
* @return The thread name prefix used when processing the given queue.
*/
private static String getThreadPrefix(DeliveryQueue queue, String parentContext) {
String output;
int truncateLength = 25;
if (parentContext == null) {
output = MessageFormat.format("TundraTN/Queue \"{0}\"", StringHelper.truncate(queue.getQueueName(), truncateLength, true));
} else {
output = MessageFormat.format("TundraTN/Queue \"{0}\" ParentContext={1}", StringHelper.truncate(queue.getQueueName(), truncateLength, true), parentContext);
}
return output;
}
/**
* Returns true if the invocation call stack includes the WmTN/wm.tn.queuing:deliverBatch service.
*
* @return True if the invocation call stack includes the WmTN/wm.tn.queuing:deliverBatch service.
*/
private static boolean invokedByTradingNetworks() {
java.util.Iterator iterator = InvokeState.getCurrentState().getCallStack().iterator();
boolean result = false;
while(iterator.hasNext()) {
result = iterator.next().toString().equals(DELIVER_BATCH_SERVICE_NAME);
if (result) break;
}
return result;
}
/**
* Returns the number of milliseconds to wait until the next scheduled run of the given delivery queue.
*
* @param queue A delivery queue.
* @return The number of milliseconds to wait.
* @throws ServiceException If a datetime parsing error occurs.
*/
private static long untilNextRun(DeliveryQueue queue) throws ServiceException {
long next = nextRun(queue);
long now = System.currentTimeMillis();
return next > now ? next - now : 0L;
}
/**
* Parser for the datetimes to be parsed in a DeliverySchedule object.
*/
private static final SimpleDateFormat DELIVERY_SCHEDULE_DATETIME_PARSER = new SimpleDateFormat("yyyy/MM/ddHH:mm:ss");
/**
* Returns the time in milliseconds of the next scheduled run of the given delivery queue.
*
* @param queue A delivery queue.
* @return The time in milliseconds of the next scheduled run.
* @throws ServiceException If a datetime parsing error occurs.
*/
private static long nextRun(DeliveryQueue queue) throws ServiceException {
DeliverySchedule schedule = queue.getSchedule();
String type = schedule.getType();
long next = 0L, start = 0L, end = 0L;
try {
String endDate = schedule.getEndDate(), endTime = schedule.getEndTime();
if (endDate != null && endTime != null) {
end = DELIVERY_SCHEDULE_DATETIME_PARSER.parse(endDate + endTime).getTime();
}
boolean noOverlap = BooleanHelper.parse(schedule.getNoOverlap());
if (type.equals(DeliverySchedule.TYPE_REPEATING)) {
ScheduledTask.Simple repeatingTask = new ScheduledTask.Simple(Long.parseLong(schedule.getInterval()) * 1000L, noOverlap, start, end);
if (!repeatingTask.isExpired()) {
repeatingTask.calcNextTime();
next = repeatingTask.getNextRun();
}
} else if (type.equals(DeliverySchedule.TYPE_COMPLEX)) {
ScheduledTask.Mask complexTask = new ScheduledTask.Mask(Masks.buildLongMask(schedule.getMinutes()),
Masks.buildIntMask(schedule.getHours()),
Masks.buildIntMask(schedule.getDaysOfMonth()),
Masks.buildIntMask(schedule.getDaysOfWeek()),
Masks.buildIntMask(schedule.getMonths()),
noOverlap, start, end);
if (!complexTask.isExpired()) {
complexTask.calcNextTime();
next = complexTask.getNextRun();
}
}
} catch(ParseException ex) {
ExceptionHelper.raise(ex);
}
return next;
}
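/*
* Worked example (added for clarity; the values are hypothetical): the schedule's end date and end
* time are concatenated with no separator before parsing, so an endDate of "2015/06/30" and an
* endTime of "17:00:00" yield the string "2015/06/3017:00:00", which is what the
* "yyyy/MM/ddHH:mm:ss" pattern above is written to accept.
*/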
/**
* Returns whether bizdoc status should be changed or not.
*
* @param queue The queue to check for status silence on.
* @return True if bizdoc status should not be changed, otherwise false.
*/
public static boolean getStatusSilence(DeliveryQueue queue) {
boolean statusSilence = false;
if (queue != null) {
DeliverySchedule schedule = queue.getSchedule();
if (schedule != null) {
IData pipeline = schedule.getInputs();
if (pipeline != null) {
IDataCursor cursor = pipeline.getCursor();
try {
statusSilence = BooleanHelper.parse(IDataUtil.getString(cursor, "$status.silence?"));
} finally {
cursor.destroy();
}
}
}
}
return statusSilence;
}
/**
* Converts the given Trading Networks delivery queue to an IData doc.
*
* @param input The queue to convert to an IData doc representation.
* @return An IData doc representation of the given queue.
* @throws ServiceException If a database error occurs.
*/
public static IData toIData(DeliveryQueue input) throws ServiceException {
if (input == null) return null;
IData output = IDataFactory.create();
IDataCursor cursor = output.getCursor();
IDataUtil.put(cursor, "name", input.getQueueName());
IDataUtil.put(cursor, "type", input.getQueueType());
IDataUtil.put(cursor, "status", input.getState());
IDataUtil.put(cursor, "length", "" + length(input));
cursor.destroy();
return output;
}
/**
* Converts the given list of Trading Networks delivery queues to an IData[] doc list.
*
* @param input The list of queues to convert to an IData[] doc list representation.
* @return An IData[] doc list representation of the given queues.
* @throws ServiceException If a database error occurs.
*/
public static IData[] toIDataArray(DeliveryQueue[] input) throws ServiceException {
if (input == null) return null;
IData[] output = new IData[input.length];
for (int i = 0; i < input.length; i++) {
output[i] = toIData(input[i]);
}
return output;
}
}
| src/main/java/permafrost/tundra/tn/delivery/DeliveryQueueHelper.java | /*
* The MIT License (MIT)
*
* Copyright (c) 2015 Lachlan Dowding
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package permafrost.tundra.tn.delivery;
import com.wm.app.b2b.server.InvokeState;
import com.wm.app.b2b.server.ServerAPI;
import com.wm.app.b2b.server.Service;
import com.wm.app.b2b.server.ServiceException;
import com.wm.app.b2b.server.Session;
import com.wm.app.b2b.server.scheduler.ScheduledTask;
import com.wm.app.tn.db.Datastore;
import com.wm.app.tn.db.QueueOperations;
import com.wm.app.tn.db.SQLWrappers;
import com.wm.app.tn.delivery.DeliveryQueue;
import com.wm.app.tn.delivery.DeliverySchedule;
import com.wm.app.tn.delivery.GuaranteedJob;
import com.wm.app.tn.doc.BizDocEnvelope;
import com.wm.data.IData;
import com.wm.data.IDataCursor;
import com.wm.data.IDataFactory;
import com.wm.data.IDataUtil;
import com.wm.lang.ns.NSName;
import com.wm.util.Masks;
import permafrost.tundra.data.IDataHelper;
import permafrost.tundra.lang.BooleanHelper;
import permafrost.tundra.lang.ExceptionHelper;
import permafrost.tundra.lang.IdentityHelper;
import permafrost.tundra.lang.StringHelper;
import permafrost.tundra.lang.ThreadHelper;
import permafrost.tundra.server.BlockingServerThreadPoolExecutor;
import permafrost.tundra.time.DateTimeHelper;
import permafrost.tundra.tn.document.BizDocEnvelopeHelper;
import permafrost.tundra.tn.profile.ProfileCache;
import permafrost.tundra.util.concurrent.DirectExecutorService;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.MessageFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
/**
* A collection of convenience methods for working with Trading Networks delivery queues.
*/
public final class DeliveryQueueHelper {
/**
* SQL statement to select head of a delivery queue in job creation datetime order.
*/
private static final String SELECT_NEXT_DELIVERY_JOB_ORDERED_SQL = "SELECT JobID FROM DeliveryJob WHERE QueueName = ? AND JobStatus = 'QUEUED' AND TimeCreated = (SELECT MIN(TimeCreated) FROM DeliveryJob WHERE QueueName = ? AND JobStatus = 'QUEUED') AND TimeUpdated <= ?";
/**
* SQL statement to select head of a delivery queue in indeterminate order.
*/
private static final String SELECT_NEXT_DELIVERY_JOB_UNORDERED_SQL = "SELECT JobID FROM DeliveryJob WHERE QueueName = ? AND JobStatus = 'QUEUED' AND TimeCreated = (SELECT MIN(TimeCreated) FROM DeliveryJob WHERE QueueName = ? AND JobStatus = 'QUEUED' AND TimeUpdated <= ?)";
/**
* The age a delivery job must be before it is eligible to be processed.
*/
private static final long DELIVERY_JOB_AGE_THRESHOLD_MILLISECONDS = 750L;
/**
* The name of the service that Trading Networks uses to invoke delivery queue processing services.
*/
private static final String DELIVER_BATCH_SERVICE_NAME = "wm.tn.queuing:deliverBatch";
/**
* The name of the service used to update the completion status of a delivery queue job.
*/
private static final NSName UPDATE_QUEUED_TASK_SERVICE_NAME = NSName.create("wm.tn.queuing:updateQueuedTask");
/**
* The name of the service used to update a delivery queue.
*/
private static final NSName UPDATE_QUEUE_SERVICE_NAME = NSName.create("wm.tn.queuing:updateQueue");
/**
* The minimum wait between each poll of a delivery queue for more jobs.
*/
private static final long MIN_WAIT_BETWEEN_DELIVERY_QUEUE_POLLS_MILLISECONDS = 1L;
/**
* The wait between each refresh of a delivery queue settings from the database.
*/
private static final long WAIT_BETWEEN_DELIVERY_QUEUE_REFRESH_MILLISECONDS = 5L * 1000L;
/**
* The suffix used on worker thread names.
*/
private static final String WORKER_THREAD_SUFFIX = ": Worker";
/**
* The suffix used on supervisor thread names.
*/
private static final String SUPERVISOR_THREAD_SUFFIX = ": Supervisor";
/**
* The bizdoc user status to use when a job is dequeued.
*/
private static final String DEQUEUED_USER_STATUS = "DEQUEUED";
/**
* Disallow instantiation of this class.
*/
private DeliveryQueueHelper() {}
/**
* Returns the Trading Networks delivery queue associated with the given name.
*
* @param queueName The name of the queue to return.
* @return The delivery queue with the given name.
* @throws ServiceException If a database error occurs.
*/
public static DeliveryQueue get(String queueName) throws ServiceException {
if (queueName == null) return null;
DeliveryQueue queue = null;
try {
queue = QueueOperations.selectByName(queueName);
} catch(SQLException ex) {
ExceptionHelper.raise(ex);
} catch(IOException ex) {
ExceptionHelper.raise(ex);
}
return queue;
}
/**
* Refreshes the given Trading Networks delivery queue from the database.
*
* @param queue The queue to be refreshed.
* @return The given queue, refreshed from the database.
* @throws ServiceException If a database error occurs.
*/
public static DeliveryQueue refresh(DeliveryQueue queue) throws ServiceException {
return get(queue.getQueueName());
}
/**
* Returns a list of all registered Trading Networks delivery queues.
*
* @return A list of all registered Trading Networks delivery queues.
* @throws ServiceException If a database error occurs.
*/
public static DeliveryQueue[] list() throws ServiceException {
DeliveryQueue[] output = null;
try {
output = QueueOperations.select(null);
} catch(SQLException ex) {
ExceptionHelper.raise(ex);
} catch(IOException ex) {
ExceptionHelper.raise(ex);
}
return output;
}
/**
* Enables the delivery of the given Trading Networks delivery queue.
*
* @param queue The queue to enable delivery on.
* @throws ServiceException If a database error occurs.
*/
public static void enable(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
queue.setState(DeliveryQueue.STATE_ENABLED);
save(queue);
}
/**
* Disables the delivery of the given Trading Networks delivery queue.
*
* @param queue The queue to disable delivery on.
* @throws ServiceException If a database error occurs.
*/
public static void disable(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
queue.setState(DeliveryQueue.STATE_DISABLED);
save(queue);
}
/**
* Drains the delivery of the given Trading Networks delivery queue.
*
* @param queue The queue to drain delivery on.
* @throws ServiceException If a database error occurs.
*/
public static void drain(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
queue.setState(DeliveryQueue.STATE_DRAINING);
save(queue);
}
/**
* Suspends the delivery of the given Trading Networks delivery queue.
*
* @param queue The queue to suspend delivery on.
* @throws ServiceException If a database error occurs.
*/
public static void suspend(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
queue.setState(DeliveryQueue.STATE_SUSPENDED);
save(queue);
}
/**
* Returns the number of jobs currently queued in the given Trading Networks delivery queue.
*
* @param queue The queue to return the length of.
* @return The length of the given queue, which is the number of delivery jobs with a status
* of QUEUED or DELIVERING.
* @throws ServiceException If a database error occurs.
*/
public static int length(DeliveryQueue queue) throws ServiceException {
int length = 0;
if (queue != null) {
try {
String[] jobs = QueueOperations.getQueuedJobs(queue.getQueueName());
if (jobs != null) length = jobs.length;
} catch(SQLException ex) {
ExceptionHelper.raise(ex);
}
}
return length;
}
/**
* Updates the given Trading Networks delivery queue with any changes that may have occurred.
*
* @param queue The queue whose changes are to be saved.
* @throws ServiceException If a database error occurs.
*/
public static void save(DeliveryQueue queue) throws ServiceException {
if (queue == null) return;
try {
IData pipeline = IDataFactory.create();
IDataCursor cursor = pipeline.getCursor();
IDataUtil.put(cursor, "queue", queue);
cursor.destroy();
Service.doInvoke(UPDATE_QUEUE_SERVICE_NAME, pipeline);
} catch(Exception ex) {
ExceptionHelper.raise(ex);
}
}
/**
* Returns the head of the given delivery queue without dequeuing it.
*
* @param queue The delivery queue whose head job is to be returned.
* @param ordered Whether jobs should be dequeued in strict creation datetime first in first out (FIFO) order.
* @return The job at the head of the given queue, or null if the queue is empty.
* @throws ServiceException If a database error occurs.
*/
public static GuaranteedJob peek(DeliveryQueue queue, boolean ordered) throws ServiceException {
if (queue == null) return null;
Connection connection = null;
PreparedStatement statement = null;
ResultSet results = null;
GuaranteedJob job = null;
try {
connection = Datastore.getConnection();
statement = connection.prepareStatement(ordered ? SELECT_NEXT_DELIVERY_JOB_ORDERED_SQL : SELECT_NEXT_DELIVERY_JOB_UNORDERED_SQL);
statement.clearParameters();
String queueName = queue.getQueueName();
SQLWrappers.setChoppedString(statement, 1, queueName, "DeliveryQueue.QueueName");
SQLWrappers.setChoppedString(statement, 2, queueName, "DeliveryQueue.QueueName");
SQLWrappers.setTimestamp(statement, 3, new Timestamp(System.currentTimeMillis() - DELIVERY_JOB_AGE_THRESHOLD_MILLISECONDS));
results = statement.executeQuery();
if (results.next()) {
job = GuaranteedJobHelper.get(results.getString(1));
}
connection.commit();
} catch (SQLException ex) {
connection = Datastore.handleSQLException(connection, ex);
ExceptionHelper.raise(ex);
} finally {
SQLWrappers.close(results);
SQLWrappers.close(statement);
Datastore.releaseConnection(connection);
}
return job;
}
/**
* Dequeues the job at the head of the given delivery queue.
*
* @param queue The delivery queue to dequeue the head job from.
* @param ordered Whether jobs should be dequeued in strict creation datetime first in first out (FIFO) order.
* @return The dequeued job that was at the head of the given queue, or null if queue is empty.
* @throws ServiceException If a database error occurs.
*/
public static GuaranteedJob pop(DeliveryQueue queue, boolean ordered) throws ServiceException {
GuaranteedJob job = peek(queue, ordered);
GuaranteedJobHelper.setDelivering(job);
return job;
}
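// Illustrative usage sketch (the queue name below is hypothetical, not taken from this library):
// peek() inspects the head job without changing its status, while pop() additionally marks the
// job as DELIVERING via GuaranteedJobHelper.setDelivering() before returning it.
//
//   DeliveryQueue queue = DeliveryQueueHelper.get("Orders");
//   GuaranteedJob head = DeliveryQueueHelper.peek(queue, true);  // strict FIFO, job stays QUEUED
//   GuaranteedJob next = DeliveryQueueHelper.pop(queue, true);   // head job is now DELIVERING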
/**
* Callable for invoking a given service against a given job.
*/
private static class CallableGuaranteedJob implements Callable<IData> {
/**
* The job against which the service will be invoked.
*/
private GuaranteedJob job;
/**
* The delivery queue from which the job was dequeued.
*/
private DeliveryQueue queue;
/**
* The service to be invoked.
*/
private NSName service;
/**
* The pipeline the service is invoked with.
*/
private IData pipeline;
/**
* The session the service is invoked under.
*/
private Session session;
/**
* The retry settings to be used when retrying the job.
*/
private int retryLimit, retryFactor, timeToWait;
/**
* Whether the delivery queue should be suspended on retry exhaustion.
*/
private boolean suspend;
/**
* Whether the owning bizdoc's status should be left unchanged (status silence) rather than updated to reflect job success/failure.
*/
private boolean statusSilence;
/**
* The time the job was dequeued.
*/
private long timeDequeued;
/**
* Creates a new CallableGuaranteedJob which when called invokes the given service against the given job.
*
* @param job The job to be processed.
* @param service The service to be invoked to process the given job.
* @param session The session used when invoking the given service.
* @param pipeline The input pipeline used when invoking the given service.
* @param retryLimit The number of retries this job should attempt.
* @param retryFactor The factor used to extend the time to wait on each retry.
* @param timeToWait The time in seconds to wait between each retry.
* @param suspend Whether to suspend the delivery queue on job retry exhaustion.
*/
public CallableGuaranteedJob(DeliveryQueue queue, GuaranteedJob job, String service, Session session, IData pipeline, int retryLimit, int retryFactor, int timeToWait, boolean suspend) {
this(queue, job, service == null ? null : NSName.create(service), session, pipeline, retryLimit, retryFactor, timeToWait, suspend);
}
/**
* Creates a new CallableGuaranteedJob which when called invokes the given service against the given job.
*
* @param job The job to be processed.
* @param service The service to be invoked to process the given job.
* @param session The session used when invoking the given service.
* @param pipeline The input pipeline used when invoking the given service.
* @param retryLimit The number of retries this job should attempt.
* @param retryFactor The factor used to extend the time to wait on each retry.
* @param timeToWait The time in seconds to wait between each retry.
* @param suspend Whether to suspend the delivery queue on job retry exhaustion.
*/
public CallableGuaranteedJob(DeliveryQueue queue, GuaranteedJob job, NSName service, Session session, IData pipeline, int retryLimit, int retryFactor, int timeToWait, boolean suspend) {
if (queue == null) throw new NullPointerException("queue must not be null");
if (job == null) throw new NullPointerException("job must not be null");
if (service == null) throw new NullPointerException("service must not be null");
this.queue = queue;
this.job = job;
this.service = service;
this.session = session;
this.pipeline = pipeline == null ? IDataFactory.create() : IDataHelper.duplicate(pipeline);
this.retryLimit = retryLimit;
this.retryFactor = retryFactor;
this.timeToWait = timeToWait;
this.suspend = suspend;
this.statusSilence = getStatusSilence(queue);
}
/**
* Invokes the provided service with the provided pipeline and session against the job.
*
* @return The output pipeline returned by the invocation.
* @throws Exception If the service encounters an error.
*/
public IData call() throws Exception {
IData output = null;
Thread owningThread = Thread.currentThread();
String owningThreadPrefix = owningThread.getName();
try {
timeDequeued = System.currentTimeMillis();
BizDocEnvelope bizdoc = job.getBizDocEnvelope();
owningThread.setName(MessageFormat.format("{0}: Task={1} Time={2} STARTED", owningThreadPrefix, job.getJobId(), DateTimeHelper.now("datetime")));
if (bizdoc != null) {
BizDocEnvelopeHelper.setStatus(job.getBizDocEnvelope(), null, DEQUEUED_USER_STATUS, statusSilence);
}
GuaranteedJobHelper.log(job, "MESSAGE", "Processing", MessageFormat.format("Dequeued from {0} queue \"{1}\"", queue.getQueueType(), queue.getQueueName()), MessageFormat.format("Service \"{0}\" attempting to process document", service.getFullName()));
IDataCursor cursor = pipeline.getCursor();
IDataUtil.put(cursor, "$task", job);
if (bizdoc != null) {
bizdoc = BizDocEnvelopeHelper.get(bizdoc.getInternalId(), true);
IDataUtil.put(cursor, "bizdoc", bizdoc);
IDataUtil.put(cursor, "sender", ProfileCache.getInstance().get(bizdoc.getSenderId()));
IDataUtil.put(cursor, "receiver", ProfileCache.getInstance().get(bizdoc.getReceiverId()));
}
cursor.destroy();
output = Service.doInvoke(service, session, pipeline);
owningThread.setName(MessageFormat.format("{0}: Task={1} Time={2} COMPLETED", owningThreadPrefix, job.getJobId(), DateTimeHelper.now("datetime")));
setJobCompleted(output);
} catch(Exception ex) {
owningThread.setName(MessageFormat.format("{0}: Task={1} Time={2} FAILED: {3}", owningThreadPrefix, job.getJobId(), DateTimeHelper.now("datetime"), ExceptionHelper.getMessage(ex)));
setJobCompleted(output, ex);
throw ex;
} finally {
owningThread.setName(owningThreadPrefix);
}
return output;
}
/**
* Sets the job as successfully completed.
*
* @param serviceOutput The output of the service used to process the job.
* @throws Exception If a database error occurs.
*/
private void setJobCompleted(IData serviceOutput) throws Exception {
setJobCompleted(serviceOutput, null);
}
/**
* Sets the job as either successfully or unsuccessfully completed, depending on whether
* an exception is provided.
*
* @param serviceOutput The output of the service used to process the job.
* @param exception Optional exception encountered while processing the job.
* @throws Exception If a database error occurs.
*/
private void setJobCompleted(IData serviceOutput, Throwable exception) throws Exception {
IData input = IDataFactory.create();
IDataCursor cursor = input.getCursor();
IDataUtil.put(cursor, "taskid", job.getJobId());
IDataUtil.put(cursor, "queue", queue.getQueueName());
if (exception == null) {
IDataUtil.put(cursor, "status", "success");
} else {
IDataUtil.put(cursor, "status", "fail");
IDataUtil.put(cursor, "statusMsg", ExceptionHelper.getMessage(exception));
if (retryLimit > 0 && GuaranteedJobHelper.hasUnrecoverableErrors(job)) {
// abort the delivery job so it won't be retried
GuaranteedJobHelper.setRetryStrategy(job, 0, 1, 0);
GuaranteedJobHelper.log(job, "ERROR", "Delivery", "Delivery aborted", MessageFormat.format("Delivery task \"{0}\" on {1} queue \"{2}\" was aborted due to unrecoverable errors being encountered, and will not be retried", job.getJobId(), queue.getQueueType(), queue.getQueueName()));
} else {
GuaranteedJobHelper.setRetryStrategy(job, retryLimit, retryFactor, timeToWait);
}
}
IDataUtil.put(cursor, "timeDequeued", timeDequeued);
if (serviceOutput != null) IDataUtil.put(cursor, "serviceOutput", serviceOutput);
cursor.destroy();
Service.doInvoke(UPDATE_QUEUED_TASK_SERVICE_NAME, session, input);
GuaranteedJobHelper.retry(job, suspend);
}
}
/**
* Dequeues each task on the given Trading Networks delivery queue, and processes the task using the given service
* and input pipeline; if concurrency > 1, tasks will be processed by a thread pool whose size is equal to the
* desired concurrency, otherwise they will be processed on the current thread.
*
* @param queueName The name of the delivery queue whose queued jobs are to be processed.
* @param service The service to be invoked to process jobs on the given delivery queue.
* @param pipeline The input pipeline used when invoking the given service.
* @param concurrency If > 1, this is the number of threads used to process jobs simultaneously.
* @param retryLimit The number of retries this job should attempt.
* @param retryFactor The factor used to extend the time to wait on each retry.
* @param timeToWait The time in seconds to wait between each retry.
* @param threadPriority The thread priority used when processing tasks.
* @param daemon If true, all threads will be marked as daemons and execution will not end until the JVM
* shuts down or the TN queue is disabled/suspended.
* @param ordered Whether delivery queue jobs should be processed in job creation datetime order.
* @param suspend Whether to suspend the delivery queue on job retry exhaustion.
* @throws ServiceException If an error is encountered while processing jobs.
*/
public static void each(String queueName, String service, IData pipeline, int concurrency, int retryLimit, int retryFactor, int timeToWait, int threadPriority, boolean daemon, boolean ordered, boolean suspend) throws ServiceException {
if (queueName == null) throw new NullPointerException("queueName must not be null");
if (service == null) throw new NullPointerException("service must not be null");
DeliveryQueue queue = DeliveryQueueHelper.get(queueName);
if (queue == null) throw new ServiceException("Queue '" + queueName + "' does not exist");
each(queue, NSName.create(service), pipeline, concurrency, retryLimit, retryFactor, timeToWait, threadPriority, daemon, ordered, suspend);
}
/**
* Dequeues each task on the given Trading Networks delivery queue, and processes the task using the given service
* and input pipeline; if concurrency > 1, tasks will be processed by a thread pool whose size is equal to the
* desired concurrency, otherwise they will be processed on the current thread.
*
* @param queue The delivery queue whose queued jobs are to be processed.
* @param service The service to be invoked to process jobs on the given delivery queue.
* @param pipeline The input pipeline used when invoking the given service.
* @param concurrency If > 1, this is the number of threads used to process jobs simultaneously.
* @param retryLimit The number of retries this job should attempt.
* @param retryFactor The factor used to extend the time to wait on each retry.
* @param timeToWait The time in seconds to wait between each retry.
* @param threadPriority The thread priority used when processing tasks.
* @param daemon If true, all threads will be marked as daemons and execution will not end until the JVM
* shuts down or the TN queue is disabled/suspended.
* @param ordered Whether delivery queue jobs should be processed in job creation datetime order.
* @param suspend Whether to suspend the delivery queue on job retry exhaustion.
* @throws ServiceException If an error is encountered while processing jobs.
*/
public static void each(DeliveryQueue queue, NSName service, IData pipeline, int concurrency, int retryLimit, int retryFactor, int timeToWait, int threadPriority, boolean daemon, boolean ordered, boolean suspend) throws ServiceException {
// normalize concurrency
if (concurrency <= 0) concurrency = 1;
String parentContext = IdentityHelper.generate();
// set owning thread priority and name
String previousThreadName = Thread.currentThread().getName();
int previousThreadPriority = Thread.currentThread().getPriority();
Thread.currentThread().setPriority(ThreadHelper.normalizePriority(threadPriority));
String threadName = getThreadPrefix(queue, parentContext);
if (concurrency > 1) {
threadName = threadName + SUPERVISOR_THREAD_SUFFIX;
} else {
threadName = threadName + WORKER_THREAD_SUFFIX;
}
Thread.currentThread().setName(threadName);
boolean invokedByTradingNetworks = invokedByTradingNetworks();
boolean queueEnabled = queue.isEnabled() || queue.isDraining();
Session session = Service.getSession();
ExecutorService executor = getExecutor(queue, concurrency, threadPriority, daemon, InvokeState.getCurrentState(), parentContext);
long nextDeliveryQueueRefreshTime = System.currentTimeMillis() + WAIT_BETWEEN_DELIVERY_QUEUE_REFRESH_MILLISECONDS, sleepDuration = 0L;
try {
// while not interrupted and (not invoked by TN or queue is enabled): process queued jobs
while (!Thread.interrupted() && (!invokedByTradingNetworks || queueEnabled)) {
try {
if (sleepDuration > 0L) Thread.sleep(sleepDuration);
// set default sleep duration for when there are no pending jobs in queue or all threads are busy
sleepDuration = MIN_WAIT_BETWEEN_DELIVERY_QUEUE_POLLS_MILLISECONDS;
int activeCount = 0;
if (executor instanceof ThreadPoolExecutor) {
activeCount = ((ThreadPoolExecutor)executor).getActiveCount();
}
if (activeCount < concurrency) {
GuaranteedJob job = DeliveryQueueHelper.pop(queue, ordered);
if (job != null) {
// submit the job to the executor to be processed
executor.submit(new CallableGuaranteedJob(queue, job, service, session, pipeline, retryLimit, retryFactor, timeToWait, suspend));
sleepDuration = 0L; // poll for another job immediately, because the assumption is if there was one pending job then there are probably more
} else if (activeCount == 0) {
// no pending jobs, and thread pool is idle
if (daemon) {
// calculate the next run time based on TN queue schedule so that we can sleep until that time
sleepDuration = untilNextRun(queue);
if (sleepDuration == 0L) {
// the TN queue schedule either was scheduled to run only once or has now expired, so exit
break;
}
} else {
// if not daemon and all threads have finished and there are no more jobs, then exit
break;
}
}
}
// refresh the delivery queue settings from the database, in case they have changed
if (invokedByTradingNetworks && System.currentTimeMillis() >= nextDeliveryQueueRefreshTime) {
queue = DeliveryQueueHelper.refresh(queue);
queueEnabled = queue.isEnabled() || queue.isDraining();
nextDeliveryQueueRefreshTime = System.currentTimeMillis() + WAIT_BETWEEN_DELIVERY_QUEUE_REFRESH_MILLISECONDS;
}
} catch(ServiceException ex) {
// assume exception is recoverable, log it and then continue
ServerAPI.logError(ex);
} catch(InterruptedException ex) {
// exit if thread is interrupted
break;
}
}
} catch(Throwable ex) {
ExceptionHelper.raise(ex);
} finally {
// restore owning thread priority and name
Thread.currentThread().setPriority(previousThreadPriority);
Thread.currentThread().setName(previousThreadName);
executor.shutdown();
}
}
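// Illustrative invocation sketch; the queue name, service name and all numeric settings below are
// assumptions for the example, not defaults of this class:
//
//   // process "Orders" with 4 worker threads, 5 retries 60 seconds apart, normal priority,
//   // non-daemon threads, strict FIFO ordering, and no queue suspension on retry exhaustion
//   DeliveryQueueHelper.each("Orders", "acme.delivery:process", IDataFactory.create(),
//                            4, 5, 1, 60, Thread.NORM_PRIORITY, false, true, false);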
/**
* Returns an executor appropriate for the level of desired concurrency.
*
* @param queue The delivery queue to be processed.
* @param concurrency The level of desired concurrency.
* @param threadPriority The thread priority to be used by the returned executor.
* @param daemon Whether the created threads should be daemon threads.
* @param invokeState The invoke state to be used by the thread pool.
* @param parentContext A unique parent context ID to be included in a thread name for diagnostics.
* @return An executor appropriate for the level of desired concurrency.
*/
private static ExecutorService getExecutor(DeliveryQueue queue, int concurrency, int threadPriority, boolean daemon, InvokeState invokeState, String parentContext) {
ExecutorService executor;
if (concurrency <= 1) {
executor = new DirectExecutorService();
} else {
executor = new BlockingServerThreadPoolExecutor(concurrency, getThreadPrefix(queue, parentContext) + WORKER_THREAD_SUFFIX, null, threadPriority, daemon, invokeState);
((BlockingServerThreadPoolExecutor)executor).allowCoreThreadTimeOut(true);
}
return executor;
}
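// Design note: with a concurrency of 1 a DirectExecutorService is returned, which (as its name
// suggests) is assumed to run each submitted job directly on the calling thread, so single-threaded
// queues avoid the cost of a dedicated thread pool; higher concurrency uses a
// BlockingServerThreadPoolExecutor constructed with the requested concurrency, whose core threads
// are allowed to time out when idle.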
/**
* Returns the thread name prefix to be used for this delivery queue.
*
* @param queue The queue which will be processed by threads with the returned prefix.
* @param parentContext A unique parent context ID to be included in a thread name for diagnostics.
* @return The thread name prefix used when processing the given queue.
*/
private static String getThreadPrefix(DeliveryQueue queue, String parentContext) {
String output;
int truncateLength = 25;
if (parentContext == null) {
output = MessageFormat.format("TundraTN/Queue \"{0}\"", StringHelper.truncate(queue.getQueueName(), truncateLength, true));
} else {
output = MessageFormat.format("TundraTN/Queue \"{0}\" ParentContext={1}", StringHelper.truncate(queue.getQueueName(), truncateLength, true), parentContext);
}
return output;
}
/**
* Returns true if the invocation call stack includes the WmTN/wm.tn.queuing:deliverBatch service.
*
* @return True if the invocation call stack includes the WmTN/wm.tn.queuing:deliverBatch service.
*/
private static boolean invokedByTradingNetworks() {
java.util.Iterator iterator = InvokeState.getCurrentState().getCallStack().iterator();
boolean result = false;
while(iterator.hasNext()) {
result = iterator.next().toString().equals(DELIVER_BATCH_SERVICE_NAME);
if (result) break;
}
return result;
}
/**
* Returns the number of milliseconds to wait until the next scheduled run of the given delivery queue.
*
* @param queue A delivery queue.
* @return The number of milliseconds to wait.
* @throws ServiceException If a datetime parsing error occurs.
*/
private static long untilNextRun(DeliveryQueue queue) throws ServiceException {
long next = nextRun(queue);
long now = System.currentTimeMillis();
return next > now ? next - now : 0L;
}
/**
* Parser for the datetime strings stored in a DeliverySchedule object.
*/
private static final SimpleDateFormat DELIVERY_SCHEDULE_DATETIME_PARSER = new SimpleDateFormat("yyyy/MM/ddHH:mm:ss");
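// For example (values are illustrative only), an end date of "2015/12/31" combined with an end
// time of "17:30:00" is parsed from the concatenated string "2015/12/3117:30:00", matching the
// "yyyy/MM/ddHH:mm:ss" pattern above.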
/**
* Returns the time in milliseconds of the next scheduled run of the given delivery queue.
*
* @param queue A delivery queue.
* @return The time in milliseconds of the next scheduled run.
* @throws ServiceException If a datetime parsing error occurs.
*/
private static long nextRun(DeliveryQueue queue) throws ServiceException {
DeliverySchedule schedule = queue.getSchedule();
String type = schedule.getType();
long next = 0L, start = 0L, end = 0L;
try {
String endDate = schedule.getEndDate(), endTime = schedule.getEndTime();
if (endDate != null && endTime != null) {
end = DELIVERY_SCHEDULE_DATETIME_PARSER.parse(endDate + endTime).getTime();
}
boolean noOverlap = BooleanHelper.parse(schedule.getNoOverlap());
if (type.equals(DeliverySchedule.TYPE_REPEATING)) {
ScheduledTask.Simple repeatingTask = new ScheduledTask.Simple(Long.parseLong(schedule.getInterval()) * 1000L, noOverlap, start, end);
if (!repeatingTask.isExpired()) {
repeatingTask.calcNextTime();
next = repeatingTask.getNextRun();
}
} else if (type.equals(DeliverySchedule.TYPE_COMPLEX)) {
ScheduledTask.Mask complexTask = new ScheduledTask.Mask(Masks.buildLongMask(schedule.getMinutes()),
Masks.buildIntMask(schedule.getHours()),
Masks.buildIntMask(schedule.getDaysOfMonth()),
Masks.buildIntMask(schedule.getDaysOfWeek()),
Masks.buildIntMask(schedule.getMonths()),
noOverlap, start, end);
if (!complexTask.isExpired()) {
complexTask.calcNextTime();
next = complexTask.getNextRun();
}
}
} catch(ParseException ex) {
ExceptionHelper.raise(ex);
}
return next;
}
/**
* Returns whether bizdoc status changes should be suppressed (status silence) for the given queue.
*
* @param queue The queue to check for status silence on.
* @return True if bizdoc status should not be changed, otherwise false.
*/
public static boolean getStatusSilence(DeliveryQueue queue) {
boolean statusSilence = false;
if (queue != null) {
DeliverySchedule schedule = queue.getSchedule();
if (schedule != null) {
IData pipeline = schedule.getInputs();
if (pipeline != null) {
IDataCursor cursor = pipeline.getCursor();
try {
statusSilence = BooleanHelper.parse(IDataUtil.getString(cursor, "$status.silence?"));
} finally {
cursor.destroy();
}
}
}
}
return statusSilence;
}
/**
* Converts the given Trading Networks delivery queue to an IData doc.
*
* @param input The queue to convert to an IData doc representation.
* @return An IData doc representation of the given queue.
* @throws ServiceException If a database error occurs.
*/
public static IData toIData(DeliveryQueue input) throws ServiceException {
if (input == null) return null;
IData output = IDataFactory.create();
IDataCursor cursor = output.getCursor();
IDataUtil.put(cursor, "name", input.getQueueName());
IDataUtil.put(cursor, "type", input.getQueueType());
IDataUtil.put(cursor, "status", input.getState());
IDataUtil.put(cursor, "length", "" + length(input));
cursor.destroy();
return output;
}
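// Illustrative shape of the returned document (the values shown are hypothetical):
//   { name: "Orders", type: <queue type>, status: <queue state>, length: "3" }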
/**
* Converts the given list of Trading Networks delivery queues to an IData[] doc list.
*
* @param input The list of queues to convert to an IData[] doc list representation.
* @return An IData[] doc list representation of the given queues.
* @throws ServiceException If a database error occurs.
*/
public static IData[] toIDataArray(DeliveryQueue[] input) throws ServiceException {
if (input == null) return null;
IData[] output = new IData[input.length];
for (int i = 0; i < input.length; i++) {
output[i] = toIData(input[i]);
}
return output;
}
}
| Change DeliveryQueueHelper.each daemon argument name to daemonize
| src/main/java/permafrost/tundra/tn/delivery/DeliveryQueueHelper.java | Change DeliveryQueueHelper.each daemon argument name to daemonize |
|
Java | mit | c5ab6db9e99ce0c517ce8d6cde5938a60110289e | 0 | infakt/FloatingActionButton | package com.melnykov.fab;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.StateListDrawable;
import android.graphics.drawable.shapes.OvalShape;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.Interpolator;
import android.widget.AbsListView;
import android.widget.ImageButton;
import java.lang.reflect.Field;
import pl.infakt.infakt.util.CombineOnScrollListener;
/**
* Android Google+ like floating action button which reacts to the attached list view's scrolling events.
*
* @author Oleksandr Melnykov
*/
public class FloatingActionButton extends ImageButton {
// @IntDef({TYPE_NORMAL, TYPE_MINI})
public @interface TYPE {
}
public static final int TYPE_NORMAL = 0;
public static final int TYPE_MINI = 1;
protected AbsListView mListView;
private int mScrollY;
private boolean mVisible;
private int mColorNormal;
private int mColorPressed;
private boolean mShadow;
private int mType;
private int additionalBottomMargin;
private final ScrollSettleHandler mScrollSettleHandler = new ScrollSettleHandler();
private final Interpolator mInterpolator = new AccelerateDecelerateInterpolator();
private final AbsListView.OnScrollListener mOnScrollListener = new AbsListView.OnScrollListener() {
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
int newScrollY = getListViewScrollY();
if (newScrollY == mScrollY) {
return;
}
if (newScrollY > mScrollY) {
// Scrolling up
hide();
} else if (newScrollY < mScrollY) {
// Scrolling down
show();
}
mScrollY = newScrollY;
}
};
public FloatingActionButton(Context context) {
this(context, null);
}
public FloatingActionButton(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
public FloatingActionButton(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context, attrs);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int size = getDimension(
mType == TYPE_NORMAL ? R.dimen.fab_size_normal : R.dimen.fab_size_mini);
if (mShadow) {
int shadowSize = getDimension(R.dimen.fab_shadow_size);
size += shadowSize * 2;
}
setMeasuredDimension(size, size);
}
@Override
public Parcelable onSaveInstanceState() {
Parcelable superState = super.onSaveInstanceState();
SavedState savedState = new SavedState(superState);
savedState.mScrollY = mScrollY;
return savedState;
}
@Override
public void onRestoreInstanceState(Parcelable state) {
if (state instanceof SavedState) {
SavedState savedState = (SavedState) state;
mScrollY = savedState.mScrollY;
super.onRestoreInstanceState(savedState.getSuperState());
} else {
super.onRestoreInstanceState(state);
}
}
private void init(Context context, AttributeSet attributeSet) {
mVisible = true;
mColorNormal = getColor(android.R.color.holo_blue_dark);
mColorPressed = getColor(android.R.color.holo_blue_light);
mType = TYPE_NORMAL;
mShadow = true;
if (attributeSet != null) {
initAttributes(context, attributeSet);
}
updateBackground();
}
private void initAttributes(Context context, AttributeSet attributeSet) {
TypedArray attr = getTypedArray(context, attributeSet, R.styleable.FloatingActionButton);
if (attr != null) {
try {
mColorNormal = attr.getColor(R.styleable.FloatingActionButton_fab_colorNormal,
getColor(android.R.color.holo_blue_dark));
mColorPressed = attr.getColor(R.styleable.FloatingActionButton_fab_colorPressed,
getColor(android.R.color.holo_blue_light));
mShadow = attr.getBoolean(R.styleable.FloatingActionButton_fab_shadow, true);
mType = attr.getInt(R.styleable.FloatingActionButton_fab_type, TYPE_NORMAL);
} finally {
attr.recycle();
}
}
}
private void updateBackground() {
StateListDrawable drawable = new StateListDrawable();
drawable.addState(new int[]{android.R.attr.state_pressed}, createDrawable(mColorPressed));
drawable.addState(new int[]{}, createDrawable(mColorNormal));
setBackgroundCompat(drawable);
}
private Drawable createDrawable(int color) {
ShapeDrawable shapeDrawable;
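// In edit mode (the IDE layout preview) the code falls back to a plain ShapeDrawable instead of
// the runtime OvalShape, presumably to keep the preview rendering from failing.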
if (isInEditMode()) {
shapeDrawable = new ShapeDrawable();
} else {
OvalShape ovalShape = new OvalShape();
shapeDrawable = new ShapeDrawable(ovalShape);
}
shapeDrawable.getPaint().setColor(color);
if (mShadow) {
LayerDrawable layerDrawable = new LayerDrawable(
new Drawable[]{getResources().getDrawable(R.drawable.shadow),
shapeDrawable});
int shadowSize = getDimension(
mType == TYPE_NORMAL ? R.dimen.fab_shadow_size : R.dimen.fab_mini_shadow_size);
layerDrawable.setLayerInset(1, shadowSize, shadowSize, shadowSize, shadowSize);
return layerDrawable;
} else {
return shapeDrawable;
}
}
private TypedArray getTypedArray(Context context, AttributeSet attributeSet, int[] attr) {
return context.obtainStyledAttributes(attributeSet, attr, 0, 0);
}
private int getColor(int id) {
return getResources().getColor(id);
}
private int getDimension(int id) {
return getResources().getDimensionPixelSize(id);
}
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
private void setBackgroundCompat(Drawable drawable) {
if (Build.VERSION.SDK_INT >= 16) {
setBackground(drawable);
} else {
setBackgroundDrawable(drawable);
}
}
protected int getListViewScrollY() {
View topChild = mListView.getChildAt(0);
return topChild == null ? 0 : mListView.getFirstVisiblePosition() * topChild.getHeight() -
topChild.getTop();
}
private int getMarginBottom() {
int marginBottom = 0;
final ViewGroup.LayoutParams layoutParams = getLayoutParams();
if (layoutParams instanceof ViewGroup.MarginLayoutParams) {
marginBottom = ((ViewGroup.MarginLayoutParams) layoutParams).bottomMargin;
}
return marginBottom;
}
private class ScrollSettleHandler extends Handler {
private static final int TRANSLATE_DURATION_MILLIS = 200;
private int mSettledScrollY;
public void onScroll(int scrollY) {
if (mSettledScrollY != scrollY) {
mSettledScrollY = scrollY;
removeMessages(0);
sendEmptyMessage(0);
}
}
@Override
public void handleMessage(Message msg) {
animate().setInterpolator(mInterpolator)
.setDuration(TRANSLATE_DURATION_MILLIS)
.translationY(mSettledScrollY);
}
}
public void setColorNormal(int color) {
if (color != mColorNormal) {
mColorNormal = color;
updateBackground();
}
}
public void setColorNormalResId(int colorResId) {
setColorNormal(getColor(colorResId));
}
public int getColorNormal() {
return mColorNormal;
}
public void setColorPressed(int color) {
if (color != mColorPressed) {
mColorPressed = color;
updateBackground();
}
}
public void setColorPressedResId(int colorResId) {
setColorPressed(getColor(colorResId));
}
public int getColorPressed() {
return mColorPressed;
}
public void setShadow(boolean shadow) {
if (shadow != mShadow) {
mShadow = shadow;
updateBackground();
}
}
public boolean hasShadow() {
return mShadow;
}
public void setType(@TYPE int type) {
if (type != mType) {
mType = type;
updateBackground();
}
}
@TYPE
public int getType() {
return mType;
}
protected AbsListView.OnScrollListener getOnScrollListener() {
return mOnScrollListener;
}
public void show() {
if (!mVisible) {
mVisible = true;
mScrollSettleHandler.onScroll(-additionalBottomMargin);
}
}
public void hide() {
if (mVisible) {
mVisible = false;
mScrollSettleHandler.onScroll(getHeight() + getMarginBottom());
}
}
public void attachToListView(@NonNull AbsListView listView) {
if (listView == null) {
throw new NullPointerException("AbsListView cannot be null.");
}
mListView = listView;
//HAAAAAACK!!!
try {
Field declaredField = AbsListView.class.getDeclaredField("mOnScrollListener");
declaredField.setAccessible(true);
AbsListView.OnScrollListener internalOnScroll = (AbsListView.OnScrollListener) declaredField.get(mListView);
if (internalOnScroll != null) {
mListView.setOnScrollListener(new CombineOnScrollListener(internalOnScroll, mOnScrollListener));
return;
}
} catch (NoSuchFieldException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
mListView.setOnScrollListener(mOnScrollListener);
}
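// Illustrative usage sketch (the view id below is hypothetical): the reflection workaround above
// reads the list view's existing mOnScrollListener and, if one is set, wraps both listeners in a
// CombineOnScrollListener so attaching the button does not silence the original callback.
//
//   FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
//   fab.attachToListView(listView);  // listView is the AbsListView the button should follow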
/**
* A {@link android.os.Parcelable} representing the {@link com.melnykov.fab.FloatingActionButton}'s
* state.
*/
public static class SavedState extends BaseSavedState {
private int mScrollY;
public SavedState(Parcelable parcel) {
super(parcel);
}
private SavedState(Parcel in) {
super(in);
mScrollY = in.readInt();
}
@Override
public void writeToParcel(Parcel out, int flags) {
super.writeToParcel(out, flags);
out.writeInt(mScrollY);
}
public static final Creator<SavedState> CREATOR = new Creator<SavedState>() {
@Override
public SavedState createFromParcel(Parcel in) {
return new SavedState(in);
}
@Override
public SavedState[] newArray(int size) {
return new SavedState[size];
}
};
}
public void setAdditionalBottomMargin(int additionalBottomMargin) {
this.additionalBottomMargin = additionalBottomMargin;
if (mVisible) {
mVisible = false;
show();
} else {
mVisible = true;
hide();
}
}
}
| library/src/main/java/com/melnykov/fab/FloatingActionButton.java | package com.melnykov.fab;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.StateListDrawable;
import android.graphics.drawable.shapes.OvalShape;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.Interpolator;
import android.widget.AbsListView;
import android.widget.ImageButton;
import java.lang.reflect.Field;
import pl.infakt.infakt.util.CombineOnScrollListener;
/**
* Android Google+ like floating action button which reacts to the attached list view's scrolling events.
*
* @author Oleksandr Melnykov
*/
public class FloatingActionButton extends ImageButton {
// @IntDef({TYPE_NORMAL, TYPE_MINI})
public @interface TYPE {
}
public static final int TYPE_NORMAL = 0;
public static final int TYPE_MINI = 1;
protected AbsListView mListView;
private int mScrollY;
private boolean mVisible;
private int mColorNormal;
private int mColorPressed;
private boolean mShadow;
private int mType;
private int additionalBottomMargin;
private final ScrollSettleHandler mScrollSettleHandler = new ScrollSettleHandler();
private final Interpolator mInterpolator = new AccelerateDecelerateInterpolator();
private final AbsListView.OnScrollListener mOnScrollListener = new AbsListView.OnScrollListener() {
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
int newScrollY = getListViewScrollY();
if (newScrollY == mScrollY) {
return;
}
if (newScrollY > mScrollY) {
// Scrolling up
hide();
} else if (newScrollY < mScrollY) {
// Scrolling down
show();
}
mScrollY = newScrollY;
}
};
public FloatingActionButton(Context context) {
this(context, null);
}
public FloatingActionButton(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
public FloatingActionButton(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init(context, attrs);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int size = getDimension(
mType == TYPE_NORMAL ? R.dimen.fab_size_normal : R.dimen.fab_size_mini);
if (mShadow) {
int shadowSize = getDimension(R.dimen.fab_shadow_size);
size += shadowSize * 2;
}
setMeasuredDimension(size, size);
}
@Override
public Parcelable onSaveInstanceState() {
Parcelable superState = super.onSaveInstanceState();
SavedState savedState = new SavedState(superState);
savedState.mScrollY = mScrollY;
return savedState;
}
@Override
public void onRestoreInstanceState(Parcelable state) {
if (state instanceof SavedState) {
SavedState savedState = (SavedState) state;
mScrollY = savedState.mScrollY;
super.onRestoreInstanceState(savedState.getSuperState());
} else {
super.onRestoreInstanceState(state);
}
}
private void init(Context context, AttributeSet attributeSet) {
mVisible = true;
mColorNormal = getColor(android.R.color.holo_blue_dark);
mColorPressed = getColor(android.R.color.holo_blue_light);
mType = TYPE_NORMAL;
mShadow = true;
if (attributeSet != null) {
initAttributes(context, attributeSet);
}
updateBackground();
}
private void initAttributes(Context context, AttributeSet attributeSet) {
TypedArray attr = getTypedArray(context, attributeSet, R.styleable.FloatingActionButton);
if (attr != null) {
try {
mColorNormal = attr.getColor(R.styleable.FloatingActionButton_fab_colorNormal,
getColor(android.R.color.holo_blue_dark));
mColorPressed = attr.getColor(R.styleable.FloatingActionButton_fab_colorPressed,
getColor(android.R.color.holo_blue_light));
mShadow = attr.getBoolean(R.styleable.FloatingActionButton_fab_shadow, true);
mType = attr.getInt(R.styleable.FloatingActionButton_fab_type, TYPE_NORMAL);
} finally {
attr.recycle();
}
}
}
private void updateBackground() {
StateListDrawable drawable = new StateListDrawable();
drawable.addState(new int[]{android.R.attr.state_pressed}, createDrawable(mColorPressed));
drawable.addState(new int[]{}, createDrawable(mColorNormal));
setBackgroundCompat(drawable);
}
private Drawable createDrawable(int color) {
OvalShape ovalShape = new OvalShape();
ShapeDrawable shapeDrawable = new ShapeDrawable(ovalShape);
shapeDrawable.getPaint().setColor(color);
if (mShadow) {
LayerDrawable layerDrawable = new LayerDrawable(
new Drawable[]{getResources().getDrawable(R.drawable.shadow),
shapeDrawable});
int shadowSize = getDimension(
mType == TYPE_NORMAL ? R.dimen.fab_shadow_size : R.dimen.fab_mini_shadow_size);
layerDrawable.setLayerInset(1, shadowSize, shadowSize, shadowSize, shadowSize);
return layerDrawable;
} else {
return shapeDrawable;
}
}
private TypedArray getTypedArray(Context context, AttributeSet attributeSet, int[] attr) {
return context.obtainStyledAttributes(attributeSet, attr, 0, 0);
}
private int getColor(int id) {
return getResources().getColor(id);
}
private int getDimension(int id) {
return getResources().getDimensionPixelSize(id);
}
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
private void setBackgroundCompat(Drawable drawable) {
if (Build.VERSION.SDK_INT >= 16) {
setBackground(drawable);
} else {
setBackgroundDrawable(drawable);
}
}
protected int getListViewScrollY() {
View topChild = mListView.getChildAt(0);
return topChild == null ? 0 : mListView.getFirstVisiblePosition() * topChild.getHeight() -
topChild.getTop();
}
private int getMarginBottom() {
int marginBottom = 0;
final ViewGroup.LayoutParams layoutParams = getLayoutParams();
if (layoutParams instanceof ViewGroup.MarginLayoutParams) {
marginBottom = ((ViewGroup.MarginLayoutParams) layoutParams).bottomMargin;
}
return marginBottom;
}
private class ScrollSettleHandler extends Handler {
private static final int TRANSLATE_DURATION_MILLIS = 200;
private int mSettledScrollY;
public void onScroll(int scrollY) {
if (mSettledScrollY != scrollY) {
mSettledScrollY = scrollY;
removeMessages(0);
sendEmptyMessage(0);
}
}
@Override
public void handleMessage(Message msg) {
animate().setInterpolator(mInterpolator)
.setDuration(TRANSLATE_DURATION_MILLIS)
.translationY(mSettledScrollY);
}
}
public void setColorNormal(int color) {
if (color != mColorNormal) {
mColorNormal = color;
updateBackground();
}
}
public void setColorNormalResId(int colorResId) {
setColorNormal(getColor(colorResId));
}
public int getColorNormal() {
return mColorNormal;
}
public void setColorPressed(int color) {
if (color != mColorPressed) {
mColorPressed = color;
updateBackground();
}
}
public void setColorPressedResId(int colorResId) {
setColorPressed(getColor(colorResId));
}
public int getColorPressed() {
return mColorPressed;
}
public void setShadow(boolean shadow) {
if (shadow != mShadow) {
mShadow = shadow;
updateBackground();
}
}
public boolean hasShadow() {
return mShadow;
}
public void setType(@TYPE int type) {
if (type != mType) {
mType = type;
updateBackground();
}
}
@TYPE
public int getType() {
return mType;
}
protected AbsListView.OnScrollListener getOnScrollListener() {
return mOnScrollListener;
}
public void show() {
if (!mVisible) {
mVisible = true;
mScrollSettleHandler.onScroll(-additionalBottomMargin);
}
}
public void hide() {
if (mVisible) {
mVisible = false;
mScrollSettleHandler.onScroll(getHeight() + getMarginBottom());
}
}
public void attachToListView(@NonNull AbsListView listView) {
if (listView == null) {
throw new NullPointerException("AbsListView cannot be null.");
}
mListView = listView;
//HAAAAAACK!!!
try {
Field declaredField = AbsListView.class.getDeclaredField("mOnScrollListener");
declaredField.setAccessible(true);
AbsListView.OnScrollListener internalOnScroll = (AbsListView.OnScrollListener) declaredField.get(mListView);
if (internalOnScroll != null) {
mListView.setOnScrollListener(new CombineOnScrollListener(internalOnScroll, mOnScrollListener));
return;
}
} catch (NoSuchFieldException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
mListView.setOnScrollListener(mOnScrollListener);
}
/**
* A {@link android.os.Parcelable} representing the {@link com.melnykov.fab.FloatingActionButton}'s
* state.
*/
public static class SavedState extends BaseSavedState {
private int mScrollY;
public SavedState(Parcelable parcel) {
super(parcel);
}
private SavedState(Parcel in) {
super(in);
mScrollY = in.readInt();
}
@Override
public void writeToParcel(Parcel out, int flags) {
super.writeToParcel(out, flags);
out.writeInt(mScrollY);
}
public static final Creator<SavedState> CREATOR = new Creator<SavedState>() {
@Override
public SavedState createFromParcel(Parcel in) {
return new SavedState(in);
}
@Override
public SavedState[] newArray(int size) {
return new SavedState[size];
}
};
}
public void setAdditionalBottomMargin(int additionalBottomMargin) {
this.additionalBottomMargin = additionalBottomMargin;
if (mVisible) {
mVisible = false;
show();
} else {
mVisible = true;
hide();
}
}
}
| Task #122842 layout corrected
| library/src/main/java/com/melnykov/fab/FloatingActionButton.java | Task #122842 layout corrected |
|
Java | cc0-1.0 | 917fc7bbc9cd8dc22d34b0c1210dc5a2d77a1d01 | 0 | supernelis/ExperiementJavaCspSudoku | package com.archiwise.experiment.cspsudoku.domein;
import org.jcsp.lang.AltingChannelInput;
import org.jcsp.lang.CSProcess;
import org.jcsp.lang.ChannelInput;
import org.jcsp.lang.ChannelOutput;
import org.jcsp.lang.One2OneChannel;
import java.util.Optional;
/**
* Created by nelis on 02/11/15.
*/
public class Region implements CSProcess, ValueListener {
private Grid grid = new Grid();
private ChannelOutput<ValueAtPos> display;
private Optional<AltingChannelInput<ValueAtPos>> westIn = Optional.empty();
private Optional<ChannelOutput<ValueAtPos>> eastOut = Optional.empty();
private Optional<ChannelOutput<ValueAtPos>> westOut = Optional.empty();
private Optional<AltingChannelInput<ValueAtPos>> eastIn = Optional.empty();
private Optional<AltingChannelInput<ValueAtPos>> nordIn = Optional.empty();
private Optional<ChannelOutput<ValueAtPos>> nordOut = Optional.empty();
private Optional<ChannelOutput<ValueAtPos>> southOut = Optional.empty();
private Optional<AltingChannelInput<ValueAtPos>> southIn = Optional.empty();
public Region(final ChannelOutput display) {
this.display = display;
grid.addObserver(this);
}
public void setPredefinedValue(final ValueAtPos valueAtPos){
grid.setValueAtPosition(valueAtPos);
}
@Override
public void notifyValue(final ValueAtPos valueAtPos) {
display.write(valueAtPos);
writeIfPresent(valueAtPos,eastOut);
writeIfPresent(valueAtPos,nordOut);
writeIfPresent(valueAtPos,southOut);
writeIfPresent(valueAtPos,westOut);
}
private void writeIfPresent(final ValueAtPos valueAtPos, final Optional<ChannelOutput<ValueAtPos>> channel) {
if(channel.isPresent()) channel.get().write(valueAtPos);
}
@Override
public void run() {
processNeighborRowMessage(westIn, eastOut);
processNeighborRowMessage(eastIn, westOut);
processNeighborColMessage(nordIn, southOut);
processNeighborColMessage(southIn, nordOut);
}
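// Illustrative wiring sketch (the channel and region variable names are hypothetical): neighbouring
// regions are linked by JCSP channels, e.g. one region's east output feeds its eastern neighbour's
// west input, so a value placed in one region propagates along the shared row.
//
//   // given a One2OneChannel "eastward" (construction depends on the JCSP version in use):
//   leftRegion.setEastOut(eastward.out());
//   rightRegion.setWestIn(eastward.in());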
public void setWestOut(final ChannelOutput<ValueAtPos> westOut) {
this.westOut = Optional.of(westOut);
}
public void setEastIn(final AltingChannelInput<ValueAtPos> eastIn) {
this.eastIn = Optional.of(eastIn);
}
public void setNordIn(final AltingChannelInput<ValueAtPos> nordIn) {
this.nordIn = Optional.of(nordIn);
}
public void setNordOut(final ChannelOutput<ValueAtPos> nordOut) {
this.nordOut = Optional.of(nordOut);
}
public void setSouthOut(final ChannelOutput<ValueAtPos> southOut) {
this.southOut = Optional.of(southOut);
}
public void setSouthIn(final AltingChannelInput<ValueAtPos> southIn) {
this.southIn = Optional.of(southIn);
}
private void processNeighborRowMessage(final Optional<AltingChannelInput<ValueAtPos>> channelIn, final Optional<ChannelOutput<ValueAtPos>> channelOut) {
while(channelContainsValue(channelIn)){
ValueAtPos valueIn = channelIn.get().read();
grid.valueSetOnNeigtborRow(valueIn.getValue(),valueIn.getRow());
writeIfPresent(valueIn,channelOut);
}
}
private void processNeighborColMessage(final Optional<AltingChannelInput<ValueAtPos>> channelIn, final Optional<ChannelOutput<ValueAtPos>> channelOut) {
while(channelContainsValue(channelIn)){
ValueAtPos valueIn = channelIn.get().read();
grid.valueSetOnNeigtborCol(valueIn.getValue(),valueIn.getCol());
writeIfPresent(valueIn,channelOut);
}
}
private boolean channelContainsValue(final Optional<AltingChannelInput<ValueAtPos>> channelIn) {
return channelIn.isPresent() && channelIn.get().pending();
}
public void setWestIn(final AltingChannelInput<ValueAtPos> westIn) {
this.westIn = Optional.of(westIn);
}
public void setEastOut(final ChannelOutput<ValueAtPos> eastOut) {
this.eastOut = Optional.of(eastOut);
}
}
| src/main/java/com/archiwise/experiment/cspsudoku/domein/Region.java | package com.archiwise.experiment.cspsudoku.domein;
import org.jcsp.lang.AltingChannelInput;
import org.jcsp.lang.CSProcess;
import org.jcsp.lang.ChannelInput;
import org.jcsp.lang.ChannelOutput;
import org.jcsp.lang.One2OneChannel;
import java.util.Optional;
/**
* Created by nelis on 02/11/15.
*/
public class Region implements CSProcess, ValueListener {
private Grid grid = new Grid();
private ChannelOutput<ValueAtPos> display;
private Optional<AltingChannelInput<ValueAtPos>> westIn = Optional.empty();
private Optional<ChannelOutput<ValueAtPos>> eastOut = Optional.empty();
private Optional<ChannelOutput<ValueAtPos>> westOut = Optional.empty();
private Optional<AltingChannelInput<ValueAtPos>> eastIn = Optional.empty();
private Optional<AltingChannelInput<ValueAtPos>> nordIn = Optional.empty();
private Optional<ChannelOutput<ValueAtPos>> nordOut = Optional.empty();
private Optional<ChannelOutput<ValueAtPos>> southOut = Optional.empty();
private Optional<AltingChannelInput<ValueAtPos>> southIn = Optional.empty();
public Region(final ChannelOutput display) {
this.display = display;
grid.addObserver(this);
}
public void setPredefinedValue(final ValueAtPos valueAtPos){
grid.setValueAtPosition(valueAtPos);
}
@Override
public void notifyValue(final ValueAtPos valueAtPos) {
display.write(valueAtPos);
writeIfPresent(valueAtPos,eastOut);
writeIfPresent(valueAtPos,nordOut);
writeIfPresent(valueAtPos,southOut);
writeIfPresent(valueAtPos,westOut);
}
private void writeIfPresent(final ValueAtPos valueAtPos, final Optional<ChannelOutput<ValueAtPos>> channel) {
if(channel.isPresent()) channel.get().write(valueAtPos);
}
@Override
public void run() {
processNeighborRowMessage(westIn, eastOut);
processNeighborRowMessage(eastIn, westOut);
processNeighborColMessage(nordIn, southOut);
processNeighborColMessage(southIn, nordOut);
}
public void setWestOut(final ChannelOutput<ValueAtPos> westOut) {
this.westOut = Optional.of(westOut);
}
public void setEastIn(final AltingChannelInput<ValueAtPos> eastIn) {
this.eastIn = Optional.of(eastIn);
}
public void setNordIn(final AltingChannelInput<ValueAtPos> nordIn) {
this.nordIn = Optional.of(nordIn);
}
public void setNordOut(final ChannelOutput<ValueAtPos> nordOut) {
this.nordOut = Optional.of(nordOut);
}
public void setSouthOut(final ChannelOutput<ValueAtPos> southOut) {
this.southOut = Optional.of(southOut);
}
public void setSouthIn(final AltingChannelInput<ValueAtPos> southIn) {
this.southIn = Optional.of(southIn);
}
private void processNeighborRowMessage(final Optional<AltingChannelInput<ValueAtPos>> channelIn, final Optional<ChannelOutput<ValueAtPos>> channelOut) {
if(channelContainsValue(channelIn)){
ValueAtPos valueIn = channelIn.get().read();
grid.valueSetOnNeigtborRow(valueIn.getValue(),valueIn.getRow());
writeIfPresent(valueIn,channelOut);
}
}
private void processNeighborColMessage(final Optional<AltingChannelInput<ValueAtPos>> channelIn, final Optional<ChannelOutput<ValueAtPos>> channelOut) {
if(channelContainsValue(channelIn)){
ValueAtPos valueIn = channelIn.get().read();
grid.valueSetOnNeigtborCol(valueIn.getValue(),valueIn.getCol());
writeIfPresent(valueIn,channelOut);
}
}
private boolean channelContainsValue(final Optional<AltingChannelInput<ValueAtPos>> channelIn) {
return channelIn.isPresent() && channelIn.get().pending();
}
public void setWestIn(final AltingChannelInput<ValueAtPos> westIn) {
this.westIn = Optional.of(westIn);
}
public void setEastOut(final ChannelOutput<ValueAtPos> eastOut) {
this.eastOut = Optional.of(eastOut);
}
}
| Fixed tests When4Col2RowValuesSet_ThenValueIsKnown, making the run of a region more robust
| src/main/java/com/archiwise/experiment/cspsudoku/domein/Region.java | Fixed tests When4Col2RowValuesSet_ThenValueIsKnown, making the run of a region more robust |
|
Java | agpl-3.0 | e8754fce32c6d9be527157c67460deddda84c711 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 3c58ce36-2e60-11e5-9284-b827eb9e62be | hello.java | 3c5366da-2e60-11e5-9284-b827eb9e62be | 3c58ce36-2e60-11e5-9284-b827eb9e62be | hello.java | 3c58ce36-2e60-11e5-9284-b827eb9e62be |
|
Java | agpl-3.0 | e982df074cfbeded24d9ee53016d3428375eff11 | 0 | MarkehMe/FactionsAlias | package com.markehme.factionsalias.support;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import com.massivecraft.factions.P;
import com.massivecraft.factions.cmd.FCommand;
/**
* Factions 1.x Support
*
* @author MarkehMe<[email protected]>
*
*/
public class Factions1X implements SupportBase {
List<Factions1XCommandSkeleton> commands = new ArrayList<Factions1XCommandSkeleton>();
List<String> helpLines = new ArrayList<String>();
private boolean is16 = false;
public Factions1X(HashMap<String, String> settings) {
if(settings.containsKey("16")) {
if(settings.get("16") == "Y") {
is16 = true;
}
}
}
public void add(List<String> aliases,
Boolean requiresFactionsEnabled,
Boolean requiresIsPlayer,
Boolean requiresInFaction,
String permission,
String permissionDeniedMessage,
String desc,
String executingCommand) {
Factions1XCommandSkeleton command = new Factions1XCommandSkeleton(
aliases,
requiresFactionsEnabled,
requiresIsPlayer,
requiresInFaction,
permission,
permissionDeniedMessage,
desc,
executingCommand
);
commands.add(command);
P.p.cmdBase.addSubCommand(
(FCommand) command
);
if(is16) {
// Add help lines - this is only needed in 1.6.x
helpLines.add(command.getUseageTemplate(true));
}
}
@Override
public void unregister() {
for (int i=0; i < commands.size(); i++) {
P.p.cmdBase.getSubCommands().remove(commands.get(i));
}
}
@Override
public void finishCall() {
P.p.cmdBase.cmdHelp.updateHelp();
// Only 1.6 needs pages to be added,
// as 1.7 does it auto.
if(is16) {
// Ensure there are help lines to be added
if(helpLines.size() > 0 ) {
ArrayList<String> pageLines = new ArrayList<String>();
int i = 0;
for(String line : helpLines) {
if(i >= 7) {
i = 0; // reset the count
P.p.cmdBase.cmdHelp.helpPages.add(new ArrayList<String>(pageLines)); // add a copy of our page, since pageLines is reused
pageLines.clear(); // clear the current lines
}
i++;
pageLines.add(line); // add a line
}
// Add any leftover lines that haven't made a full page
if(i > 0) {
					P.p.cmdBase.cmdHelp.helpPages.add(new ArrayList<String>(pageLines)); // copy for the same reason as above
					pageLines.clear();
}
}
}
}
}
| src/com/markehme/factionsalias/support/Factions1X.java | package com.markehme.factionsalias.support;
import java.util.List;
import com.massivecraft.factions.P;
import com.massivecraft.factions.cmd.FCommand;
/**
* Factions 1.x Support
* Does not have support for /f help (yet)
*
* @author MarkehMe<[email protected]>
*
*/
public class Factions1X implements SupportBase {
public void add(List<String> aliases,
Boolean requiresFactionsEnabled,
Boolean requiresIsPlayer,
Boolean requiresInFaction,
String permission,
String permissionDeniedMessage,
String desc,
String executingCommand) {
P.p.cmdBase.addSubCommand(
(FCommand) new Factions1XCommandSkeleton(
aliases,
requiresFactionsEnabled,
requiresIsPlayer,
requiresInFaction,
permission,
permissionDeniedMessage,
desc,
executingCommand)
);
}
}
| Includes help pages
| src/com/markehme/factionsalias/support/Factions1X.java | Includes help pages |
|
Java | lgpl-2.1 | 55c670678bf93a1c9f94e9ee045f6cd2268c2dff | 0 | quarnster/silence | /* Channel.java - Handles a channel
* Copyright (C) 2000-2001 Fredrik Ehnbom
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.gjt.fredde.silence.format.xm;
/**
* A class that handles a channel
*
* @version $Id: Channel.java,v 1.9 2001/01/04 18:55:59 fredde Exp $
* @author Fredrik Ehnbom
*/
class Channel {
private Xm xm;
public Channel(Xm xm) {
this.xm = xm;
}
int currentNote = 0;
Instrument currentInstrument = null;
int currentVolume = 64;
int currentEffect = -1;
int currentEffectParam = 0;
double currentPitch = 0;
double currentPos = 0;
double currentLoopLen = 0;
boolean useVolEnv = false;
boolean sustain = false;
float volEnvK = 0;
float volEnv = 64;
int volEnvLoopLen = 0;
int volEnvLength = 0;
int volEnvType = 0;
int volEnvPos = 0;
int volEnvSustain = 0;
float rowVol;
float finalVol;
int fadeOutVol;
private final double calcPitch(int note) {
if (currentInstrument.sample.length == 0) return 0;
note += currentInstrument.sample[0].relativeNote - 1;
int period = (10*12*16*4) - (note*16*4) - (currentInstrument.sample[0].fineTune / 2);
double freq = 8363d * Math.pow(2d, ((6d * 12d * 16d * 4d - period) / (double) (12 * 16 * 4)));
double pitch = freq / (double) xm.deviceSampleRate;
return pitch;
}
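	/*
	 * For reference (derived from the code above, standard XM period/frequency
	 * relations; not part of the original revision):
	 *
	 *   period = 10*12*16*4 - note*16*4 - fineTune/2
	 *   freq   = 8363 * 2^((6*12*16*4 - period) / (12*16*4))
	 *
	 * When period equals 6*12*16*4 the exponent is zero and freq is exactly 8363 Hz.
	 * The returned pitch is the step through the sample data per output frame,
	 * freq / deviceSampleRate, which is how play() advances currentPos.
	 */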
private final void updateEffects() {
switch (currentEffect) {
case 0x0A: // Volume slide
currentVolume += (currentEffectParam & 0xF0) != 0 ?
(currentEffectParam >> 4) & 0xF :
-(currentEffectParam & 0xF);
currentVolume = currentVolume < 0 ? 0 : currentVolume > 64 ? 64 : currentVolume;
rowVol = (( currentVolume / 64f) * 32f);
if (currentVolume == 0 && (currentEffectParam & 0xF0) == 0) currentEffect = -1;
break;
case 0x0C: // set volume
currentVolume = currentEffectParam;
currentEffect = -1;
rowVol = (((float) currentVolume / 64) * 32f);
if (currentInstrument != null) rowVol *= ((float) currentInstrument.sample[0].volume / 64);
break;
case 0x0E: // extended MOD commands
int eff = (currentEffectParam >> 4) & 0xF;
if (eff == 0x0C) { // note cut
if ((currentEffectParam & 0xF) == 0) {
currentVolume = 0;
} else {
currentEffectParam = (eff << 4) + (currentEffectParam & 0xF) - 1;
}
}
break;
case 0x0F: // set tempo
if (currentEffectParam > 0x20) {
xm.defaultBpm = currentEffectParam;
xm.samplesPerTick = (5 * xm.deviceSampleRate) / (2 * xm.defaultBpm);
} else {
xm.defaultTempo = currentEffectParam;
xm.tempo = xm.defaultTempo;
}
currentEffect = -1;
break;
case 0x10: // set global volume (Gxx)
xm.globalVolume = currentEffectParam;
currentEffect = -1;
break;
case 0x11: // global volume slide (Hxx)
if (xm.tempo + 1 == xm.defaultTempo) return;
if (xm.tempo <= 1) currentEffect = -1;
xm.globalVolume += (currentEffectParam & 0xF0) != 0 ?
(currentEffectParam >> 4) & 0xF :
-(currentEffectParam & 0xF);
break;
default: // unknown effect
currentEffect = -1;
break;
}
}
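	/*
	 * Quick reference for the parameters handled above (standard XM semantics,
	 * added for readability; not in the original revision): for effect 0x0A the
	 * upper nibble slides volume up and the lower nibble slides it down once per
	 * tick, e.g. 0x20 raises the channel volume by 2 each tick while 0x03 lowers
	 * it by 3. Effect 0x0F sets ticks-per-row for values up to 0x20 and BPM for
	 * larger values, which is why samplesPerTick is recomputed in that branch.
	 */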
private final void updateVolumes() {
finalVol = rowVol;
if (currentNote == 97) {
finalVol *= ((float) fadeOutVol / 65536);
fadeOutVol -= currentInstrument.fadeoutVolume;
if (fadeOutVol <= 10) {
currentInstrument = null;
return;
}
}
finalVol *= (volEnv / 64);
if (xm.globalVolume != 64) finalVol *= ((double) xm.globalVolume / 64);
if (useVolEnv) {
if (currentNote == 97) {
volEnv += volEnvK;
if (volEnvLength <= 0) {
volEnvPos++;
if (volEnvPos == volEnvLoopLen) {
if ((volEnvType & 0x4) != 0) {
volEnvPos = currentInstrument.volLoopStart;
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
} else {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = 0;
useVolEnv = false;
if (volEnv <= 1) {
currentInstrument = null;
return;
}
}
} else {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
}
}
volEnvLength--;
} else if (!sustain) { // note != 97
volEnv += volEnvK;
if (volEnvLength <= 0) {
volEnvPos++;
if ((volEnvPos == volEnvSustain && (volEnvType & 0x2) != 0)) {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
sustain = true;
} else if (volEnvPos == volEnvLoopLen) {
if ((volEnvType & 0x4) != 0) { // loop
volEnvPos = currentInstrument.volLoopStart;
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvLoopLen = currentInstrument.volLoopEnd;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
} else {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = 0;
useVolEnv = false;
}
} else {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
}
}
volEnvLength--;
}
} else if (currentNote == 97) {
currentInstrument = null;
return;
}
}
final int skip(Pattern pattern, int patternpos) {
int check = pattern.data[patternpos++];
if ((check & 0x80) != 0) {
if ((check & 0x1) != 0) patternpos++;
if ((check & 0x2) != 0) patternpos++;
if ((check & 0x4) != 0) patternpos++;
if ((check & 0x8) != 0) patternpos++;
if ((check & 0x10) != 0) patternpos++;
} else {
patternpos += 4;
}
return patternpos;
}
final int update(Pattern pattern, int patternpos) {
int check = pattern.data[patternpos++];
if ((check & 0x80) != 0) {
// note
if ((check & 0x1) != 0)
currentNote = pattern.data[patternpos++];
// instrument
if ((check & 0x2) != 0) {
currentEffect = -1;
currentInstrument = xm.instrument[pattern.data[patternpos++] - 1];
currentLoopLen = currentInstrument.sample[0].sampleData.length - 1;
currentPitch = calcPitch(currentNote);
currentPos = 0;
currentVolume = 64;
fadeOutVol = 65536;
if (currentInstrument.volumeEnvelopePoints.length != 0) {
volEnv = currentInstrument.volumeEnvelopePoints[0].y;
volEnvPos = 0;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
volEnvSustain = currentInstrument.volSustain;
volEnvType = currentInstrument.volType;
useVolEnv = ((volEnvType & 0x1) != 0);
if ((volEnvType & 0x4) != 0)
volEnvLoopLen = currentInstrument.volLoopEnd;
else
volEnvLoopLen = currentInstrument.volumeEnvelopePoints.length - 1;
sustain = (volEnvPos == volEnvSustain && (volEnvType & 0x2) != 0);
} else {
useVolEnv = false;
volEnv = 64;
volEnvK = 0;
volEnvPos = 0;
}
if (currentInstrument.sample.length == 0) currentInstrument = null;
}
// volume
if ((check & 0x4) != 0) {
int tmp = pattern.data[patternpos++]&0xff;
if (tmp <= 0x50) { // volume
currentVolume = tmp-1;
} else if (tmp < 0x70) { // volume slide down
currentEffect = 0x0A;
currentEffectParam = (tmp - 0x40);
} else if (tmp < 0x80) { // volume slide up
currentEffect = 0x0A;
currentEffectParam = (tmp - 0x70);
} else if (tmp < 0x90) { // fine volume slide down
currentVolume -= (tmp - 0x80);
} else if (tmp < 0xa0) { // fine volume slide up
currentVolume += (tmp - 0x90);
} else if (tmp < 0xb0) { // vibrato speed
} else if (tmp < 0xc0) { // vibrato
} else if (tmp < 0xd0) { // set panning
} else if (tmp < 0xe0) { // panning slide left
} else if (tmp < 0xf0) { // panning slide right
} else if (tmp >= 0xf0) { // Tone porta
}
}
// effect
if ((check & 0x8) != 0) {
currentEffect = pattern.data[patternpos++];
currentEffectParam = 0;
}
// effect param
if ((check & 0x10) != 0)
currentEffectParam = pattern.data[patternpos++]&0xff;
} else {
currentNote = check;
currentInstrument = xm.instrument[pattern.data[patternpos++] - 1];
currentLoopLen = currentInstrument.sample[0].sampleData.length - 1;
currentVolume = 64;
int tmp = pattern.data[patternpos++]&0xff;
if (tmp <= 0x50) { // volume
currentVolume = tmp;
} else if (tmp < 0x70) { // volume slide down
currentEffect = 0x0A;
currentEffectParam = (tmp - 0x60);
} else if (tmp < 0x80) { // volume slide up
currentEffect = 0x0A;
currentEffectParam = (tmp - 0x70);
} else if (tmp < 0x90) { // fine volume slide down
currentVolume -= (tmp - 0x80);
} else if (tmp < 0xa0) { // fine volume slide up
currentVolume += (tmp - 0x90);
} else if (tmp < 0xb0) { // vibrato speed
} else if (tmp < 0xc0) { // vibrato
} else if (tmp < 0xd0) { // set panning
} else if (tmp < 0xe0) { // panning slide left
} else if (tmp < 0xf0) { // panning slide right
} else if (tmp >= 0xf0) { // Tone porta
}
currentEffect = pattern.data[patternpos++];
currentEffectParam = pattern.data[patternpos++]&0xff;
fadeOutVol = 65536;
if (currentInstrument.volumeEnvelopePoints.length != 0) {
volEnv = currentInstrument.volumeEnvelopePoints[0].y;
volEnvPos = 0;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
volEnvSustain = currentInstrument.volSustain;
volEnvType = currentInstrument.volType;
if ((volEnvType & 0x4) != 0)
volEnvLoopLen = currentInstrument.volLoopEnd;
else
volEnvLoopLen = currentInstrument.volumeEnvelopePoints.length - 1;
useVolEnv = ((volEnvType & 0x1) != 0);
sustain = (volEnvPos == volEnvSustain && (volEnvType & 0x2) != 0);
} else {
useVolEnv = false;
volEnv = 64;
volEnvK = 0;
volEnvPos = 0;
}
currentPitch = calcPitch(currentNote);
currentPos = 0;
}
rowVol = (( currentVolume / 64f) * 32f);
if (currentInstrument != null) rowVol *= (currentInstrument.sample[0].volume / 64f);
return patternpos;
}
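	/*
	 * Volume-column ranges parsed in update() (standard XM encoding, noted here
	 * for readability): 0x10-0x50 set the volume directly, 0x60-0x6F and
	 * 0x70-0x7F are volume slides down/up, 0x80-0x8F and 0x90-0x9F are fine
	 * slides, 0xA0-0xEF cover vibrato and panning variants, and 0xF0-0xFF is
	 * tone portamento. Only the volume and slide ranges are implemented above;
	 * the remaining branches are intentionally left empty.
	 */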
public final void updateTick() {
if (currentEffect != -1) updateEffects();
if (currentInstrument != null) updateVolumes();
}
final void play(int[] buffer, int off, int len) {
if (currentInstrument == null || finalVol < 1 || currentNote == 0) return;
for (int i = off; i < off+len; i++) {
int sample = (int) (currentInstrument.sample[0].sampleData[(int) currentPos] * finalVol);
buffer[i] += (sample & 65535) | (sample << 16);
currentPos += currentPitch;
currentLoopLen += (currentPitch < 0) ? currentPitch : -currentPitch;
if (currentLoopLen <= 0) {
if ((currentInstrument.sample[0].loopType & 0x2) != 0) {
// pingpong loop
currentPitch = -currentPitch;
if (currentPitch < 0) {
currentPos = currentInstrument.sample[0].loopStart + currentInstrument.sample[0].loopEnd-1;
} else {
currentPos = currentInstrument.sample[0].loopStart;
}
currentLoopLen = currentInstrument.sample[0].loopEnd;
} else if ((currentInstrument.sample[0].loopType & 0x1) != 0) {
// forward loop
currentPos = currentInstrument.sample[0].loopStart;
currentLoopLen = currentInstrument.sample[0].loopEnd;
} else {
// no loop
currentInstrument = null;
return;
}
}
}
}
}
/*
* ChangeLog:
* $Log: Channel.java,v $
* Revision 1.9 2001/01/04 18:55:59 fredde
* some smaller changes
*
* Revision 1.8 2000/12/21 17:19:59 fredde
* volumeenvelopes works better, uses precalced k-values,
* pingpong loop fixed
*
* Revision 1.7 2000/10/14 19:09:04 fredde
* changed volume stuff back to 32 since
* sampleData is of type byte[] again
*
* Revision 1.6 2000/10/12 15:04:42 fredde
* fixed volume envelopes after sustain.
* updated volumes to work with (8-bit sample) << 8
*
* Revision 1.5 2000/10/08 18:01:57 fredde
* changes to play the file even better.
*
* Revision 1.4 2000/10/07 13:48:06 fredde
* Lots of fixes to play correct.
* Added volume stuff.
*
* Revision 1.3 2000/10/01 17:06:38 fredde
* basic playing abilities added
*
* Revision 1.2 2000/09/29 19:39:48 fredde
* no need to be public
*
* Revision 1.1.1.1 2000/09/25 16:34:34 fredde
* initial commit
*
*/
| silence/format/xm/Channel.java | /* Channel.java - Handles a channel
* Copyright (C) 2000 Fredrik Ehnbom
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.gjt.fredde.silence.format.xm;
/**
* A class that handles a channel
*
* @version $Id: Channel.java,v 1.8 2000/12/21 17:19:59 fredde Exp $
* @author Fredrik Ehnbom
*/
class Channel {
private Xm xm;
public Channel(Xm xm) {
this.xm = xm;
}
int currentNote = 0;
Instrument currentInstrument = null;
int currentVolume = 64;
int currentEffect = -1;
int currentEffectParam = 0;
double currentPitch = 0;
double currentPos = 0;
double currentLoopLen = 0;
boolean useVolEnv = false;
boolean sustain = false;
float volEnvK = 0;
float volEnv = 64;
int volEnvLoopLen = 0;
int volEnvLength = 0;
int volEnvType = 0;
int volEnvPos = 0;
int volEnvSustain = 0;
float rowVol;
float finalVol;
int fadeOutVol;
private final double calcPitch(int note) {
if (currentInstrument.sample.length == 0) return 0;
note += currentInstrument.sample[0].relativeNote - 1;
int period = (10*12*16*4) - (note*16*4) - (currentInstrument.sample[0].fineTune / 2);
double freq = 8363d * Math.pow(2d, ((6d * 12d * 16d * 4d - period) / (double) (12 * 16 * 4)));
double pitch = freq / (double) xm.deviceSampleRate;
return pitch;
}
private final void updateEffects() {
switch (currentEffect) {
case 0x0A: // Volume slide
currentVolume += (currentEffectParam & 0xF0) != 0 ?
(currentEffectParam >> 4) & 0xF :
-(currentEffectParam & 0xF);
currentVolume = currentVolume < 0 ? 0 : currentVolume > 64 ? 64 : currentVolume;
rowVol = (( currentVolume / 64f) * 32f);
if (currentVolume == 0 && (currentEffectParam & 0xF0) == 0) currentEffect = -1;
break;
case 0x0C: // set volume
currentVolume = currentEffectParam;
currentEffect = -1;
rowVol = (((float) currentVolume / 64) * 32f);
if (currentInstrument != null) rowVol *= ((float) currentInstrument.sample[0].volume / 64);
break;
case 0x0E: // extended MOD commands
int eff = (currentEffectParam >> 4) & 0xF;
if (eff == 0x0C) { // note cut
if ((currentEffectParam & 0xF) == 0) {
currentVolume = 0;
} else {
currentEffectParam = (eff << 4) + (currentEffectParam & 0xF) - 1;
}
}
break;
case 0x0F: // set tempo
if (currentEffectParam > 0x20) {
xm.defaultBpm = currentEffectParam;
xm.samplesPerTick = (5 * xm.deviceSampleRate) / (2 * xm.defaultBpm);
} else {
xm.defaultTempo = currentEffectParam;
xm.tempo = xm.defaultTempo;
}
currentEffect = -1;
break;
case 0x10: // set global volume (Gxx)
xm.globalVolume = currentEffectParam;
currentEffect = -1;
break;
case 0x11: // global volume slide (Hxx)
if (xm.tempo + 1 == xm.defaultTempo) return;
if (xm.tempo <= 1) currentEffect = -1;
xm.globalVolume += (currentEffectParam & 0xF0) != 0 ?
(currentEffectParam >> 4) & 0xF :
-(currentEffectParam & 0xF);
break;
default: // unknown effect
currentEffect = -1;
break;
}
}
private final void updateVolumes() {
finalVol = rowVol;
if (currentNote == 97) {
finalVol *= ((float) fadeOutVol / 65536);
fadeOutVol -= currentInstrument.fadeoutVolume;
if (fadeOutVol <= 10) {
currentInstrument = null;
return;
}
}
finalVol *= (volEnv / 64);
if (xm.globalVolume != 64) finalVol *= ((double) xm.globalVolume / 64);
if (useVolEnv) {
if (currentNote == 97) {
volEnv += volEnvK;
if (volEnvLength <= 0) {
volEnvPos++;
if (volEnvPos == volEnvLoopLen) {
if ((volEnvType & 0x4) != 0) {
volEnvPos = currentInstrument.volLoopStart;
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
} else {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = 0;
useVolEnv = false;
if (volEnv <= 1) {
currentInstrument = null;
return;
}
}
} else {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
}
}
volEnvLength--;
} else if (!sustain) { // note != 97
volEnv += volEnvK;
if (volEnvLength <= 0) {
volEnvPos++;
if ((volEnvPos == volEnvSustain && (volEnvType & 0x2) != 0)) {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
sustain = true;
} else if (volEnvPos == volEnvLoopLen) {
if ((volEnvType & 0x4) != 0) { // loop
volEnvPos = currentInstrument.volLoopStart;
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvLoopLen = currentInstrument.volLoopEnd;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
} else {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = 0;
useVolEnv = false;
}
} else {
volEnv = currentInstrument.volumeEnvelopePoints[volEnvPos].y;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
}
}
volEnvLength--;
}
} else if (currentNote == 97) {
currentInstrument = null;
return;
}
}
final int skip(Pattern pattern, int patternpos) {
int check = pattern.data[patternpos++];
if ((check & 0x80) != 0) {
if ((check & 0x1) != 0) patternpos++;
if ((check & 0x2) != 0) patternpos++;
if ((check & 0x4) != 0) patternpos++;
if ((check & 0x8) != 0) patternpos++;
if ((check & 0x10) != 0) patternpos++;
} else {
patternpos += 4;
}
return patternpos;
}
final int update(Pattern pattern, int patternpos) {
int check = pattern.data[patternpos++];
if ((check & 0x80) != 0) {
// note
if ((check & 0x1) != 0)
currentNote = pattern.data[patternpos++];
// instrument
if ((check & 0x2) != 0) {
currentEffect = -1;
currentInstrument = xm.instrument[pattern.data[patternpos++] - 1];
currentLoopLen = currentInstrument.sample[0].sampleData.length - 1;
currentPitch = calcPitch(currentNote);
currentPos = 0;
currentVolume = 64;
fadeOutVol = 65536;
if (currentInstrument.volumeEnvelopePoints.length != 0) {
volEnv = currentInstrument.volumeEnvelopePoints[0].y;
volEnvPos = 0;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
volEnvSustain = currentInstrument.volSustain;
volEnvType = currentInstrument.volType;
useVolEnv = ((volEnvType & 0x1) != 0);
if ((volEnvType & 0x4) != 0)
volEnvLoopLen = currentInstrument.volLoopEnd;
else
volEnvLoopLen = currentInstrument.volumeEnvelopePoints.length - 1;
sustain = (volEnvPos == volEnvSustain && (volEnvType & 0x2) != 0);
} else {
useVolEnv = false;
volEnv = 64;
volEnvK = 0;
volEnvPos = 0;
}
if (currentInstrument.sample.length == 0) currentInstrument = null;
}
// volume
if ((check & 0x4) != 0) {
int tmp = pattern.data[patternpos++]&0xff;
if (tmp <= 0x50) { // volume
currentVolume = tmp-1;
} else if (tmp < 0x70) { // volume slide down
currentEffect = 0x0A;
currentEffectParam = (tmp - 0x40);
} else if (tmp < 0x80) { // volume slide up
currentEffect = 0x0A;
currentEffectParam = (tmp - 0x70) * 16;
} else if (tmp < 0x90) { // fine volume slide down
currentVolume -= (tmp - 0x80);
} else if (tmp < 0xa0) { // fine volume slide up
currentVolume += (tmp - 0x90);
} else if (tmp < 0xb0) { // vibrato speed
} else if (tmp < 0xc0) { // vibrato
} else if (tmp < 0xd0) { // set panning
} else if (tmp < 0xe0) { // panning slide left
} else if (tmp < 0xf0) { // panning slide right
} else if (tmp >= 0xf0) { // Tone porta
}
}
// effect
if ((check & 0x8) != 0)
currentEffect = pattern.data[patternpos++];
// effect param
if ((check & 0x10) != 0)
currentEffectParam = pattern.data[patternpos++]&0xff;
} else {
currentNote = check;
currentInstrument = xm.instrument[pattern.data[patternpos++] - 1];
currentLoopLen = currentInstrument.sample[0].sampleData.length - 1;
currentVolume = 64;
int tmp = pattern.data[patternpos++]&0xff;
if (tmp <= 0x50) { // volume
currentVolume = tmp;
} else if (tmp < 0x70) { // volume slide down
currentEffect = 0x0A;
currentEffectParam = (tmp - 0x60);
} else if (tmp < 0x80) { // volume slide up
currentEffect = 0x0A;
currentEffectParam = (tmp - 0x70) * 16;
} else if (tmp < 0x90) { // fine volume slide down
currentVolume -= (tmp - 0x80);
} else if (tmp < 0xa0) { // fine volume slide up
currentVolume += (tmp - 0x90);
} else if (tmp < 0xb0) { // vibrato speed
} else if (tmp < 0xc0) { // vibrato
} else if (tmp < 0xd0) { // set panning
} else if (tmp < 0xe0) { // panning slide left
} else if (tmp < 0xf0) { // panning slide right
} else if (tmp >= 0xf0) { // Tone porta
}
currentEffect = pattern.data[patternpos++];
currentEffectParam = pattern.data[patternpos++]&0xff;
fadeOutVol = 65536;
if (currentInstrument.volumeEnvelopePoints.length != 0) {
volEnv = currentInstrument.volumeEnvelopePoints[0].y;
volEnvPos = 0;
volEnvK = currentInstrument.volumeEnvInfo[volEnvPos].y;
volEnvLength = (int) currentInstrument.volumeEnvInfo[volEnvPos].x;
volEnvSustain = currentInstrument.volSustain;
volEnvType = currentInstrument.volType;
if ((volEnvType & 0x4) != 0)
volEnvLoopLen = currentInstrument.volLoopEnd;
else
volEnvLoopLen = currentInstrument.volumeEnvelopePoints.length - 1;
useVolEnv = ((volEnvType & 0x1) != 0);
sustain = (volEnvPos == volEnvSustain && (volEnvType & 0x2) != 0);
} else {
useVolEnv = false;
volEnv = 64;
volEnvK = 0;
volEnvPos = 0;
}
currentPitch = calcPitch(currentNote);
currentPos = 0;
}
rowVol = (( currentVolume / 64f) * 32f);
if (currentInstrument != null) rowVol *= (currentInstrument.sample[0].volume / 64f);
return patternpos;
}
public final void updateTick() {
if (currentEffect != -1) updateEffects();
if (currentInstrument != null) updateVolumes();
}
final void play(int[] buffer, int off, int len) {
if (currentInstrument == null || finalVol < 1 || currentNote == 0) return;
for (int i = off; i < off+len; i++) {
int sample = (int) (currentInstrument.sample[0].sampleData[(int) currentPos] * finalVol);
buffer[i] += (sample & 65535) | (sample << 16);
currentPos += currentPitch;
currentLoopLen += (currentPitch < 0) ? currentPitch : -currentPitch;
if (currentLoopLen <= 0) {
if ((currentInstrument.sample[0].loopType & 0x2) != 0) {
// pingpong loop
currentPitch = -currentPitch;
if (currentPitch < 0) {
currentPos = currentInstrument.sample[0].loopStart + currentInstrument.sample[0].loopEnd-1;
} else {
currentPos = currentInstrument.sample[0].loopStart;
}
currentLoopLen = currentInstrument.sample[0].loopEnd;
} else if ((currentInstrument.sample[0].loopType & 0x1) != 0) {
// forward loop
currentPos = currentInstrument.sample[0].loopStart;
currentLoopLen = currentInstrument.sample[0].loopEnd;
} else {
// no loop
currentInstrument = null;
return;
}
}
}
}
}
/*
* ChangeLog:
* $Log: Channel.java,v $
* Revision 1.8 2000/12/21 17:19:59 fredde
* volumeenvelopes works better, uses precalced k-values,
* pingpong loop fixed
*
* Revision 1.7 2000/10/14 19:09:04 fredde
* changed volume stuff back to 32 since
* sampleData is of type byte[] again
*
* Revision 1.6 2000/10/12 15:04:42 fredde
* fixed volume envelopes after sustain.
* updated volumes to work with (8-bit sample) << 8
*
* Revision 1.5 2000/10/08 18:01:57 fredde
* changes to play the file even better.
*
* Revision 1.4 2000/10/07 13:48:06 fredde
* Lots of fixes to play correct.
* Added volume stuff.
*
* Revision 1.3 2000/10/01 17:06:38 fredde
* basic playing abilities added
*
* Revision 1.2 2000/09/29 19:39:48 fredde
* no need to be public
*
* Revision 1.1.1.1 2000/09/25 16:34:34 fredde
* initial commit
*
*/
| some smaller changes
| silence/format/xm/Channel.java | some smaller changes |
|
Java | lgpl-2.1 | caca861c7bf0485a149088234ead906bc53fee8d | 0 | Zandorum/DimDoors,CannibalVox/DimDoors | package StevenDimDoors.mod_pocketDim.saving;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import net.minecraftforge.common.DimensionManager;
import StevenDimDoors.mod_pocketDim.Point3D;
import StevenDimDoors.mod_pocketDim.mod_pocketDim;
import StevenDimDoors.mod_pocketDim.core.DimLink;
import StevenDimDoors.mod_pocketDim.core.DimensionType;
import StevenDimDoors.mod_pocketDim.core.LinkType;
import StevenDimDoors.mod_pocketDim.core.NewDimData;
import StevenDimDoors.mod_pocketDim.core.PocketManager;
import StevenDimDoors.mod_pocketDim.dungeon.DungeonData;
import StevenDimDoors.mod_pocketDim.helpers.DungeonHelper;
import StevenDimDoors.mod_pocketDim.util.DDLogger;
import StevenDimDoors.mod_pocketDim.util.FileFilters;
import StevenDimDoors.mod_pocketDim.util.Point4D;
import com.google.common.io.Files;
public class DDSaveHandler
{
public static boolean loadAll()
{
// SenseiKiwi: Loading up our save data is not as simple as just reading files.
// To properly restore dimensions, we need to make sure we always load
// a dimension's parent and root before trying to load it. We'll use
// topological sorting to determine the order in which to recreate the
// dimension objects such that we respect those dependencies.
// Links must be loaded after instantiating all the dimensions and must
// be checked against our dimension blacklist.
// Don't surround this code with try-catch. Our mod should crash if an error
// occurs at this level, since it could lead to some nasty problems.
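		// Illustration of the required order (dimension IDs are made up): if pocket 7
		// has parent 3 and root 0, the registration order must be 0 -> 3 -> 7;
		// unpackDimData() below achieves this by queueing the roots first and then
		// walking each root's child lists breadth-first.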
DDLogger.startTimer("Loading data");
String basePath = DimensionManager.getCurrentSaveRootDirectory() + "/DimensionalDoors/data/";
File dataDirectory = new File(basePath);
// Check if the folder exists. If it doesn't, just return.
if (!dataDirectory.exists())
{
return true;
}
// Load the dimension blacklist
File blacklistFile = new File(basePath+"blacklist.txt");
if(blacklistFile.exists())
{
BlacklistProcessor blacklistReader = new BlacklistProcessor();
List<Integer> blacklist = readBlacklist(blacklistFile,blacklistReader);
PocketManager.createAndRegisterBlacklist(blacklist);
}
// Load the personal pockets mapping
File personalPocketMap = new File(basePath+"personalPockets.txt");
HashMap<String, Integer> ppMap = new HashMap<String, Integer>();
if(personalPocketMap.exists())
{
PersonalPocketMappingProcessor ppMappingProcessor = new PersonalPocketMappingProcessor();
ppMap = readPersonalPocketsMapping(personalPocketMap,ppMappingProcessor);
}
// List any dimension data files and read each dimension
DimDataProcessor reader = new DimDataProcessor();
HashMap<Integer, PackedDimData> packedDims = new HashMap<Integer, PackedDimData>();
FileFilter dataFileFilter = new FileFilters.RegexFileFilter("dim_-?\\d+\\.txt");
File[] dataFiles = dataDirectory.listFiles(dataFileFilter);
for (File dataFile : dataFiles)
{
PackedDimData packedDim = readDimension(dataFile, reader);
if(packedDim == null)
{
throw new IllegalStateException("The DD data for "+dataFile.getName().replace(".txt", "")+" at "+dataFile.getPath()+" is corrupted. Please report this on the MCF or on the DD github issues tracker.");
}
packedDims.put(packedDim.ID,packedDim);
}
List<PackedLinkData> linksToUnpack = new ArrayList<PackedLinkData>();
//get the grand list of all links to unpack
for(PackedDimData packedDim : packedDims.values())
{
linksToUnpack.addAll(packedDim.Links);
}
unpackDimData(packedDims);
unpackLinkData(linksToUnpack);
HashMap<String, NewDimData> personalPocketsMap = new HashMap<String, NewDimData>();
for(Entry<String, Integer> pair : ppMap.entrySet())
{
personalPocketsMap.put(pair.getKey(), PocketManager.getDimensionData(pair.getValue()));
}
PocketManager.setPersonalPocketsMapping(personalPocketsMap);
return true;
}
/**
* Takes a list of packedDimData and rebuilds the DimData for it
* @param packedDims
* @return
*/
public static boolean unpackDimData(HashMap<Integer,PackedDimData> packedDims)
{
LinkedList<Integer> dimsToRegister = new LinkedList<Integer>();
for(PackedDimData packedDim : packedDims.values())
{
//fix pockets without parents
verifyParents(packedDim, packedDims);
//Load roots first by inserting them in the LinkedList first.
if(packedDim.RootID==packedDim.ID)
{
dimsToRegister.addFirst(packedDim.ID);
}
}
//load the children for each root
while(!dimsToRegister.isEmpty())
{
Integer childID = dimsToRegister.pop();
PackedDimData data = packedDims.get(childID);
dimsToRegister.addAll(verifyChildren(data, packedDims));
PocketManager.registerPackedDimData(data);
}
return true;
}
/**
* Fixes the case where a child of a parent has been deleted.
* -removes the child from parent
*
* @param packedDim
* @param packedDims
* @return
*/
private static ArrayList<Integer> verifyChildren(PackedDimData packedDim,HashMap<Integer,PackedDimData> packedDims)
{
ArrayList<Integer> children = new ArrayList<Integer>();
children.addAll(packedDim.ChildIDs);
boolean isMissing = false;
for(Integer childID : packedDim.ChildIDs)
{
if(!packedDims.containsKey(childID))
{
children.remove(childID);
isMissing=true;
}
}
if(isMissing)
{
packedDim=(new PackedDimData(packedDim.ID, packedDim.Depth, packedDim.PackDepth, packedDim.ParentID, packedDim.RootID, packedDim.Orientation, DimensionType.getTypeFromIndex(packedDim.DimensionType), packedDim.IsFilled, packedDim.DungeonData, packedDim.Origin, children, packedDim.Links, packedDim.Tails));
packedDims.put(packedDim.ID, packedDim);
}
return children;
}
/**
* Fixes the case where a child had its parent deleted OR where a parent forgot about its child
* -Changes the missing parent to the dims root if its original parent is gone.
* -Finds the new parent and adds it to its list of children or reminds the old parent if it forgot its child
*
* @param packedDim
* @param packedDims
*/
public static void verifyParents(PackedDimData packedDim,HashMap<Integer,PackedDimData> packedDims)
{
ArrayList<Integer> fosterChildren = new ArrayList<Integer>();
fosterChildren.add(packedDim.ID);
DimensionType type = DimensionType.getTypeFromIndex(packedDim.DimensionType);
//fix pockets without parents
if(!packedDims.containsKey(packedDim.ParentID))
{
			//Fix the orphan by changing its parent to its root, re-connecting it to the tree
packedDim=(new PackedDimData(packedDim.ID, 1, packedDim.PackDepth, packedDim.RootID, packedDim.RootID, packedDim.Orientation,type, packedDim.IsFilled, packedDim.DungeonData, packedDim.Origin, packedDim.ChildIDs, packedDim.Links, packedDim.Tails));
packedDims.put(packedDim.ID, packedDim);
}
//fix pockets whose parents have forgotten about them
PackedDimData fosterParent = packedDims.get(packedDim.ParentID);
if(!fosterParent.ChildIDs.contains(packedDim.ID)&&packedDim.ID!=packedDim.RootID)
{
//find the root, and fix it by adding the orphan's ID to its children
fosterChildren.addAll(fosterParent.ChildIDs);
fosterParent=(new PackedDimData(fosterParent.ID, fosterParent.Depth, fosterParent.PackDepth, fosterParent.ParentID, fosterParent.RootID, fosterParent.Orientation, type, fosterParent.IsFilled, fosterParent.DungeonData, fosterParent.Origin, fosterChildren, fosterParent.Links, fosterParent.Tails));
packedDims.put(fosterParent.ID, fosterParent);
}
}
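	/*
	 * Example of the repair above (hypothetical dimension IDs): if dim 12 claims
	 * parent 9 but dim 9 no longer exists, dim 12 is re-attached directly under
	 * its root with depth 1; if dim 9 does exist but its ChildIDs list has lost
	 * 12, the entry is added back so the pocket tree stays consistent.
	 */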
public static boolean unpackLinkData(List<PackedLinkData> linksToUnpack)
{
Point3D fakePoint = new Point3D(-1,-1,-1);
List<PackedLinkData> unpackedLinks = new ArrayList<PackedLinkData>();
/**
* sort through the list, unpacking links that do not have parents.
*/
		//TODO - what if we have a loop of links?
for(PackedLinkData packedLink : linksToUnpack)
{
if(packedLink.parent.equals(fakePoint))
{
NewDimData data = PocketManager.getDimensionData(packedLink.source.getDimension());
LinkType linkType = LinkType.getLinkTypeFromIndex(packedLink.tail.linkType);
DimLink link = data.createLink(packedLink.source, linkType, packedLink.orientation, packedLink.lock);
Point4D destination = packedLink.tail.destination;
if(destination!=null)
{
PocketManager.createDimensionDataDangerously(destination.getDimension()).setLinkDestination(link, destination.getX(),destination.getY(),destination.getZ());
}
unpackedLinks.add(packedLink);
}
}
linksToUnpack.removeAll(unpackedLinks);
//unpack remaining children
while(!linksToUnpack.isEmpty())
{
for(PackedLinkData packedLink : linksToUnpack)
{
NewDimData data = PocketManager.createDimensionDataDangerously(packedLink.source.getDimension());
if(data.getLink(packedLink.parent)!=null)
{
data.createChildLink(packedLink.source, data.getLink(packedLink.parent), packedLink.lock);
}
unpackedLinks.add(packedLink);
}
linksToUnpack.removeAll(unpackedLinks);
}
return true;
}
private static PackedDimData readDimension(File dataFile, DimDataProcessor reader)
{
try
{
return reader.readFromFile(dataFile);
}
catch (Exception e)
{
System.err.println("Could not read dimension data from: " + dataFile.getAbsolutePath());
System.err.println("The following error occurred:");
printException(e, false);
return null;
}
}
public static boolean saveAll(Iterable<? extends IPackable<PackedDimData>> dimensions,
List<Integer> blacklist, boolean checkModified) throws IOException
{
// Create the data directory for our dimensions
// Don't catch exceptions here. If we can't create this folder,
// the mod should crash to let the user know early on.
// Get the save directory path
File saveDirectory = new File(mod_pocketDim.instance.getCurrentSavePath() + "/DimensionalDoors/data/");
String savePath = saveDirectory.getAbsolutePath();
String baseSavePath = savePath + "/dim_";
File backupDirectory = new File(savePath + "/backup");
String baseBackupPath = backupDirectory.getAbsolutePath() + "/dim_";
if (!saveDirectory.exists())
{
// Create the save directory
Files.createParentDirs(saveDirectory);
saveDirectory.mkdir();
}
if (!backupDirectory.exists())
{
// Create the backup directory
backupDirectory.mkdir();
}
// Create and write the blackList
writeBlacklist(blacklist, savePath);
//create and write personal pocket mapping
writePersonalPocketMap(PocketManager.getPersonalPocketMapping(), savePath);
// Write the dimension save data
boolean succeeded = true;
DimDataProcessor writer = new DimDataProcessor();
for (IPackable<PackedDimData> dimension : dimensions)
{
// Check if the dimension should be saved
if (!checkModified || dimension.isModified())
{
if (writeDimension(dimension, writer, baseSavePath, baseBackupPath))
{
dimension.clearModified();
}
else
{
succeeded = false;
}
}
}
return succeeded;
}
private static boolean writeBlacklist(List<Integer> blacklist, String savePath)
{
try
{
BlacklistProcessor writer = new BlacklistProcessor();
File tempFile = new File(savePath + "/blacklist.tmp");
File saveFile = new File(savePath + "/blacklist.txt");
writer.writeToFile(tempFile, blacklist);
saveFile.delete();
tempFile.renameTo(saveFile);
return true;
}
catch (Exception e)
{
System.err.println("Could not save blacklist. The following error occurred:");
printException(e, true);
return false;
}
}
private static boolean writePersonalPocketMap(HashMap<String, NewDimData> hashMap, String savePath)
{
try
{
HashMap<String, Integer> ppMap = new HashMap<String, Integer>();
for(Entry<String, NewDimData> pair : hashMap.entrySet())
{
ppMap.put(pair.getKey(), pair.getValue().id());
}
PersonalPocketMappingProcessor writer = new PersonalPocketMappingProcessor();
File tempFile = new File(savePath + "/personalPockets.tmp");
File saveFile = new File(savePath + "/personalPockets.txt");
writer.writeToFile(tempFile, ppMap);
saveFile.delete();
tempFile.renameTo(saveFile);
return true;
}
catch (Exception e)
{
System.err.println("Could not save personal pockets mapping. The following error occurred:");
printException(e, true);
return false;
}
}
private static boolean writeDimension(IPackable<PackedDimData> dimension, DimDataProcessor writer, String basePath, String backupPath)
{
try
{
File saveFile = new File(basePath + dimension.name() + ".txt");
// If the save file already exists, back it up.
if (saveFile.exists())
{
Files.move(saveFile, new File(backupPath + dimension.name() + ".txt"));
}
writer.writeToFile(saveFile, dimension.pack());
return true;
}
catch (Exception e)
{
System.err.println("Could not save data for dimension #" + dimension.name() + ". The following error occurred:");
printException(e, true);
return false;
}
}
private static void printException(Exception e, boolean verbose)
{
if (e.getCause() == null)
{
if (verbose)
{
e.printStackTrace();
}
else
{
System.err.println(e.getMessage());
}
}
else
{
System.out.println(e.getMessage());
System.err.println("Caused by an underlying error:");
if (verbose)
{
e.getCause().printStackTrace();
}
else
{
System.err.println(e.getCause().getMessage());
}
}
}
//TODO - make this more robust
public static DungeonData unpackDungeonData(PackedDungeonData packedDungeon)
{
for(DungeonData data : DungeonHelper.instance().getRegisteredDungeons())
{
if(data.schematicName().equals(packedDungeon.SchematicName))
{
return data;
}
}
return null;
}
public static List<Integer> readBlacklist(File blacklistFile, BlacklistProcessor reader)
{
try
{
			List<Integer> list = reader.readFromFile(blacklistFile);
			if (list == null)
				return new ArrayList<Integer>(0);
			return list;
		}
catch (Exception e)
{
e.printStackTrace();
return new ArrayList<Integer>(0);
}
}
public static HashMap<String,Integer> readPersonalPocketsMapping(File ppMap, PersonalPocketMappingProcessor reader)
{
try
{
return reader.readFromFile(ppMap);
}
catch (Exception e)
{
e.printStackTrace();
return null;
}
}
}
| src/main/java/StevenDimDoors/mod_pocketDim/saving/DDSaveHandler.java | package StevenDimDoors.mod_pocketDim.saving;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import net.minecraftforge.common.DimensionManager;
import StevenDimDoors.mod_pocketDim.Point3D;
import StevenDimDoors.mod_pocketDim.mod_pocketDim;
import StevenDimDoors.mod_pocketDim.core.DimLink;
import StevenDimDoors.mod_pocketDim.core.DimensionType;
import StevenDimDoors.mod_pocketDim.core.LinkType;
import StevenDimDoors.mod_pocketDim.core.NewDimData;
import StevenDimDoors.mod_pocketDim.core.PocketManager;
import StevenDimDoors.mod_pocketDim.dungeon.DungeonData;
import StevenDimDoors.mod_pocketDim.helpers.DungeonHelper;
import StevenDimDoors.mod_pocketDim.util.DDLogger;
import StevenDimDoors.mod_pocketDim.util.FileFilters;
import StevenDimDoors.mod_pocketDim.util.Point4D;
import com.google.common.io.Files;
public class DDSaveHandler
{
public static boolean loadAll()
{
// SenseiKiwi: Loading up our save data is not as simple as just reading files.
// To properly restore dimensions, we need to make sure we always load
// a dimension's parent and root before trying to load it. We'll use
// topological sorting to determine the order in which to recreate the
// dimension objects such that we respect those dependencies.
// Links must be loaded after instantiating all the dimensions and must
// be checked against our dimension blacklist.
// Don't surround this code with try-catch. Our mod should crash if an error
// occurs at this level, since it could lead to some nasty problems.
DDLogger.startTimer("Loading data");
String basePath = DimensionManager.getCurrentSaveRootDirectory() + "/DimensionalDoors/data/";
File dataDirectory = new File(basePath);
// Check if the folder exists. If it doesn't, just return.
if (!dataDirectory.exists())
{
return true;
}
// Load the dimension blacklist
File blacklistFile = new File(basePath+"blacklist.txt");
if(blacklistFile.exists())
{
BlacklistProcessor blacklistReader = new BlacklistProcessor();
List<Integer> blacklist = readBlacklist(blacklistFile,blacklistReader);
PocketManager.createAndRegisterBlacklist(blacklist);
}
// Load the personal pockets mapping
File personalPocketMap = new File(basePath+"personalPockets.txt");
HashMap<String, Integer> ppMap = new HashMap<String, Integer>();
if(personalPocketMap.exists())
{
PersonalPocketMappingProcessor ppMappingProcessor = new PersonalPocketMappingProcessor();
ppMap = readPersonalPocketsMapping(personalPocketMap,ppMappingProcessor);
}
// List any dimension data files and read each dimension
DimDataProcessor reader = new DimDataProcessor();
HashMap<Integer, PackedDimData> packedDims = new HashMap<Integer, PackedDimData>();
FileFilter dataFileFilter = new FileFilters.RegexFileFilter("dim_-?\\d+\\.txt");
File[] dataFiles = dataDirectory.listFiles(dataFileFilter);
for (File dataFile : dataFiles)
{
PackedDimData packedDim = readDimension(dataFile, reader);
if(packedDim == null)
{
throw new IllegalStateException("The DD data for "+dataFile.getName().replace(".txt", "")+" at "+dataFile.getPath()+" is corrupted. Please report this on the MCF or on the DD github issues tracker.");
}
packedDims.put(packedDim.ID,packedDim);
}
List<PackedLinkData> linksToUnpack = new ArrayList<PackedLinkData>();
//get the grand list of all links to unpack
for(PackedDimData packedDim : packedDims.values())
{
linksToUnpack.addAll(packedDim.Links);
}
unpackDimData(packedDims);
unpackLinkData(linksToUnpack);
HashMap<String, NewDimData> personalPocketsMap = new HashMap<String, NewDimData>();
for(Entry<String, Integer> pair : ppMap.entrySet())
{
personalPocketsMap.put(pair.getKey(), PocketManager.getDimensionData(pair.getValue()));
}
PocketManager.setPersonalPocketsMapping(personalPocketsMap);
return true;
}
/**
* Takes a list of packedDimData and rebuilds the DimData for it
* @param packedDims
* @return
*/
public static boolean unpackDimData(HashMap<Integer,PackedDimData> packedDims)
{
LinkedList<Integer> dimsToRegister = new LinkedList<Integer>();
for(PackedDimData packedDim : packedDims.values())
{
//fix pockets without parents
verifyParents(packedDim, packedDims);
//Load roots first by inserting them in the LinkedList first.
if(packedDim.RootID==packedDim.ID)
{
dimsToRegister.addFirst(packedDim.ID);
}
}
//load the children for each root
while(!dimsToRegister.isEmpty())
{
Integer childID = dimsToRegister.pop();
PackedDimData data = packedDims.get(childID);
dimsToRegister.addAll(verifyChildren(data, packedDims));
PocketManager.registerPackedDimData(data);
}
return true;
}
/**
* Fixes the case where a child of a parent has been deleted.
* -removes the child from parent
*
* @param packedDim
* @param packedDims
* @return
*/
private static ArrayList<Integer> verifyChildren(PackedDimData packedDim,HashMap<Integer,PackedDimData> packedDims)
{
ArrayList<Integer> children = new ArrayList<Integer>();
children.addAll(packedDim.ChildIDs);
boolean isMissing = false;
for(Integer childID : packedDim.ChildIDs)
{
if(!packedDims.containsKey(childID))
{
children.remove(childID);
isMissing=true;
}
}
if(isMissing)
{
packedDim=(new PackedDimData(packedDim.ID, packedDim.Depth, packedDim.PackDepth, packedDim.ParentID, packedDim.RootID, packedDim.Orientation, DimensionType.getTypeFromIndex(packedDim.DimensionType), packedDim.IsFilled, packedDim.DungeonData, packedDim.Origin, children, packedDim.Links, packedDim.Tails));
packedDims.put(packedDim.ID, packedDim);
}
return children;
}
/**
* Fixes the case where a child had its parent deleted OR where a parent forgot about its child
* -Changes the missing parent to the dims root if its original parent is gone.
* -Finds the new parent and adds it to its list of children or reminds the old parent if it forgot its child
*
* @param packedDim
* @param packedDims
*/
public static void verifyParents(PackedDimData packedDim,HashMap<Integer,PackedDimData> packedDims)
{
ArrayList<Integer> fosterChildren = new ArrayList<Integer>();
fosterChildren.add(packedDim.ID);
DimensionType type = DimensionType.getTypeFromIndex(packedDim.DimensionType);
//fix pockets without parents
if(!packedDims.containsKey(packedDim.ParentID))
{
			//Fix the orphan by changing its parent to its root, re-connecting it to the tree
packedDim=(new PackedDimData(packedDim.ID, 1, packedDim.PackDepth, packedDim.RootID, packedDim.RootID, packedDim.Orientation,type, packedDim.IsFilled, packedDim.DungeonData, packedDim.Origin, packedDim.ChildIDs, packedDim.Links, packedDim.Tails));
packedDims.put(packedDim.ID, packedDim);
}
//fix pockets whose parents have forgotten about them
PackedDimData fosterParent = packedDims.get(packedDim.ParentID);
if(!fosterParent.ChildIDs.contains(packedDim.ID)&&packedDim.ID!=packedDim.RootID)
{
//find the root, and fix it by adding the orphan's ID to its children
fosterChildren.addAll(fosterParent.ChildIDs);
fosterParent=(new PackedDimData(fosterParent.ID, fosterParent.Depth, fosterParent.PackDepth, fosterParent.ParentID, fosterParent.RootID, fosterParent.Orientation, type, fosterParent.IsFilled, fosterParent.DungeonData, fosterParent.Origin, fosterChildren, fosterParent.Links, fosterParent.Tails));
packedDims.put(fosterParent.ID, fosterParent);
}
}
public static boolean unpackLinkData(List<PackedLinkData> linksToUnpack)
{
Point3D fakePoint = new Point3D(-1,-1,-1);
List<PackedLinkData> unpackedLinks = new ArrayList<PackedLinkData>();
/**
* sort through the list, unpacking links that do not have parents.
*/
		//TODO - what if we have a loop of links?
for(PackedLinkData packedLink : linksToUnpack)
{
if(packedLink.parent.equals(fakePoint))
{
NewDimData data = PocketManager.getDimensionData(packedLink.source.getDimension());
LinkType linkType = LinkType.getLinkTypeFromIndex(packedLink.tail.linkType);
DimLink link = data.createLink(packedLink.source, linkType, packedLink.orientation, packedLink.lock);
Point4D destination = packedLink.tail.destination;
if(destination!=null)
{
PocketManager.createDimensionDataDangerously(destination.getDimension()).setLinkDestination(link, destination.getX(),destination.getY(),destination.getZ());
}
unpackedLinks.add(packedLink);
}
}
linksToUnpack.removeAll(unpackedLinks);
//unpack remaining children
while(!linksToUnpack.isEmpty())
{
for(PackedLinkData packedLink : linksToUnpack)
{
NewDimData data = PocketManager.createDimensionDataDangerously(packedLink.source.getDimension());
if(data.getLink(packedLink.parent)!=null)
{
data.createChildLink(packedLink.source, data.getLink(packedLink.parent), packedLink.lock);
}
unpackedLinks.add(packedLink);
}
linksToUnpack.removeAll(unpackedLinks);
}
return true;
}
private static PackedDimData readDimension(File dataFile, DimDataProcessor reader)
{
try
{
return reader.readFromFile(dataFile);
}
catch (Exception e)
{
System.err.println("Could not read dimension data from: " + dataFile.getAbsolutePath());
System.err.println("The following error occurred:");
printException(e, false);
return null;
}
}
public static boolean saveAll(Iterable<? extends IPackable<PackedDimData>> dimensions,
List<Integer> blacklist, boolean checkModified) throws IOException
{
// Create the data directory for our dimensions
// Don't catch exceptions here. If we can't create this folder,
// the mod should crash to let the user know early on.
// Get the save directory path
File saveDirectory = new File(mod_pocketDim.instance.getCurrentSavePath() + "/DimensionalDoors/data/");
String savePath = saveDirectory.getAbsolutePath();
String baseSavePath = savePath + "/dim_";
File backupDirectory = new File(savePath + "/backup");
String baseBackupPath = backupDirectory.getAbsolutePath() + "/dim_";
if (!saveDirectory.exists())
{
// Create the save directory
Files.createParentDirs(saveDirectory);
saveDirectory.mkdir();
}
if (!backupDirectory.exists())
{
// Create the backup directory
backupDirectory.mkdir();
}
// Create and write the blackList
writeBlacklist(blacklist, savePath);
//create and write personal pocket mapping
writePersonalPocketMap(PocketManager.getPersonalPocketMapping(), savePath);
// Write the dimension save data
boolean succeeded = true;
DimDataProcessor writer = new DimDataProcessor();
for (IPackable<PackedDimData> dimension : dimensions)
{
// Check if the dimension should be saved
if (!checkModified || dimension.isModified())
{
if (writeDimension(dimension, writer, baseSavePath, baseBackupPath))
{
dimension.clearModified();
}
else
{
succeeded = false;
}
}
}
return succeeded;
}
private static boolean writeBlacklist(List<Integer> blacklist, String savePath)
{
try
{
BlacklistProcessor writer = new BlacklistProcessor();
File tempFile = new File(savePath + "/blacklist.tmp");
File saveFile = new File(savePath + "/blacklist.txt");
writer.writeToFile(tempFile, blacklist);
saveFile.delete();
tempFile.renameTo(saveFile);
return true;
}
catch (Exception e)
{
System.err.println("Could not save blacklist. The following error occurred:");
printException(e, true);
return false;
}
}
private static boolean writePersonalPocketMap(HashMap<String, NewDimData> hashMap, String savePath)
{
try
{
HashMap<String, Integer> ppMap = new HashMap<String, Integer>();
for(Entry<String, NewDimData> pair : hashMap.entrySet())
{
ppMap.put(pair.getKey(), pair.getValue().id());
}
PersonalPocketMappingProcessor writer = new PersonalPocketMappingProcessor();
File tempFile = new File(savePath + "/personalPockets.tmp");
File saveFile = new File(savePath + "/personalPockets.txt");
writer.writeToFile(tempFile, ppMap);
saveFile.delete();
tempFile.renameTo(saveFile);
return true;
}
catch (Exception e)
{
System.err.println("Could not save personal pockets mapping. The following error occurred:");
printException(e, true);
return false;
}
}
private static boolean writeDimension(IPackable<PackedDimData> dimension, DimDataProcessor writer, String basePath, String backupPath)
{
try
{
File saveFile = new File(basePath + dimension.name() + ".txt");
// If the save file already exists, back it up.
if (saveFile.exists())
{
Files.move(saveFile, new File(backupPath + dimension.name() + ".txt"));
}
writer.writeToFile(saveFile, dimension.pack());
return true;
}
catch (Exception e)
{
System.err.println("Could not save data for dimension #" + dimension.name() + ". The following error occurred:");
printException(e, true);
return false;
}
}
private static void printException(Exception e, boolean verbose)
{
if (e.getCause() == null)
{
if (verbose)
{
e.printStackTrace();
}
else
{
System.err.println(e.getMessage());
}
}
else
{
System.out.println(e.getMessage());
System.err.println("Caused by an underlying error:");
if (verbose)
{
e.getCause().printStackTrace();
}
else
{
System.err.println(e.getCause().getMessage());
}
}
}
//TODO - make this more robust
public static DungeonData unpackDungeonData(PackedDungeonData packedDungeon)
{
for(DungeonData data : DungeonHelper.instance().getRegisteredDungeons())
{
if(data.schematicName().equals(packedDungeon.SchematicName))
{
return data;
}
}
return null;
}
public static List<Integer> readBlacklist(File blacklistFile, BlacklistProcessor reader)
{
try
{
return reader.readFromFile(blacklistFile);
}
catch (Exception e)
{
e.printStackTrace();
return null;
}
}
public static HashMap<String,Integer> readPersonalPocketsMapping(File ppMap, PersonalPocketMappingProcessor reader)
{
try
{
return reader.readFromFile(ppMap);
}
catch (Exception e)
{
e.printStackTrace();
return null;
}
}
}
| Fixes #35
If there is a problem reading the blacklist file from disk, previously,
null would be returned from the DDSaveHandler.readBlacklist method. This
would result in a crash later on down the line when we tried to iterate
the blacklist. Now we return an empty array. I'd like to be able to
diagnose the issues causing the blacklist file to be corrupted, but
unfortunately when this issue crops up, people only post the crash :(
| src/main/java/StevenDimDoors/mod_pocketDim/saving/DDSaveHandler.java | Fixes #35 |
|
Java | lgpl-2.1 | 2c7a7e7dcacae9a3740edc123de537b64e63aa72 | 0 | jensopetersen/exist,olvidalo/exist,wshager/exist,patczar/exist,RemiKoutcherawy/exist,windauer/exist,jensopetersen/exist,opax/exist,dizzzz/exist,adamretter/exist,lcahlander/exist,windauer/exist,adamretter/exist,wshager/exist,dizzzz/exist,hungerburg/exist,jessealama/exist,joewiz/exist,wshager/exist,adamretter/exist,zwobit/exist,MjAbuz/exist,lcahlander/exist,shabanovd/exist,olvidalo/exist,RemiKoutcherawy/exist,wolfgangmm/exist,ljo/exist,patczar/exist,lcahlander/exist,shabanovd/exist,patczar/exist,ambs/exist,windauer/exist,wolfgangmm/exist,olvidalo/exist,MjAbuz/exist,zwobit/exist,kohsah/exist,windauer/exist,patczar/exist,zwobit/exist,MjAbuz/exist,joewiz/exist,adamretter/exist,jensopetersen/exist,wshager/exist,patczar/exist,olvidalo/exist,olvidalo/exist,dizzzz/exist,dizzzz/exist,ambs/exist,jensopetersen/exist,joewiz/exist,MjAbuz/exist,RemiKoutcherawy/exist,ljo/exist,wshager/exist,kohsah/exist,jessealama/exist,windauer/exist,hungerburg/exist,eXist-db/exist,ljo/exist,eXist-db/exist,wolfgangmm/exist,dizzzz/exist,shabanovd/exist,wolfgangmm/exist,opax/exist,jessealama/exist,jessealama/exist,kohsah/exist,kohsah/exist,eXist-db/exist,wolfgangmm/exist,ambs/exist,ambs/exist,jensopetersen/exist,eXist-db/exist,RemiKoutcherawy/exist,ambs/exist,wolfgangmm/exist,hungerburg/exist,hungerburg/exist,eXist-db/exist,kohsah/exist,patczar/exist,MjAbuz/exist,hungerburg/exist,lcahlander/exist,shabanovd/exist,joewiz/exist,joewiz/exist,lcahlander/exist,jensopetersen/exist,wshager/exist,zwobit/exist,opax/exist,eXist-db/exist,shabanovd/exist,jessealama/exist,dizzzz/exist,zwobit/exist,ljo/exist,windauer/exist,joewiz/exist,adamretter/exist,lcahlander/exist,RemiKoutcherawy/exist,ljo/exist,adamretter/exist,RemiKoutcherawy/exist,MjAbuz/exist,kohsah/exist,ambs/exist,shabanovd/exist,jessealama/exist,opax/exist,opax/exist,ljo/exist,zwobit/exist | /*
* eXist Open Source Native XML Database
* Copyright (C) 2009 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id:$
*/
package org.exist.debuggee.dgbp.packets;
import java.util.List;
import org.apache.mina.core.session.IoSession;
import org.exist.xquery.Expression;
/**
* @author <a href="mailto:[email protected]">Dmitriy Shabanov</a>
*
*/
public class StackGet extends Command {
private Integer stackDepth = null;
private List<Expression> stacks;
public StackGet(IoSession session, String args) {
super(session, args);
}
protected void setArgument(String arg, String val) {
if (arg.equals("d"))
stackDepth = Integer.parseInt(val);
else
super.setArgument(arg, val);
}
public byte[] toBytes() {
String response = "" +
"<response " +
"command=\"stack_get\" " +
"transaction_id=\""+transactionID+"\">\n";
if (stackDepth != null) {
int index = stacks.size() - 1 - stackDepth;
if (index >=0 && index < stacks.size())
response += stackToString(index);
} else {
for (int index = stacks.size()-1; index >= 0; index--)
response += stackToString(index);
}
response += "</response>";
return response.getBytes();
}
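	/*
	 * Example of the XML produced for a two-frame stack when no depth argument is
	 * given (transaction id, line numbers and file name are illustrative only):
	 *
	 *   <response command="stack_get" transaction_id="12">
	 *   <stack level="0" lineno="42" type="file" filename="/db/test.xq" />
	 *   <stack level="1" lineno="7" type="file" filename="/db/test.xq" />
	 *   </response>
	 *
	 * Level 0 is the innermost (current) expression, matching the DBGp convention.
	 */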
private String stackToString(int index) {
if (stacks == null || stacks.size() == 0)
return "";
Expression expr = stacks.get(index);
int level = stacks.size() - index - 1;
return "<stack level=\""+String.valueOf(level)+"\" " +
"lineno=\""+expr.getLine()+"\" " +
"type=\"file\" " +
"filename=\""+getFileuri(expr.getSource())+"\" />";
// +
// "where=\"\" " +
// "cmdbegin=\""+expr.getLine()+":"+expr.getColumn()+"\" " +
// "cmdend=\""+(expr.getLine())+":"+(expr.getColumn()+1)+"\"/>";
}
/* (non-Javadoc)
* @see org.exist.debuggee.dgbp.packets.Command#exec()
*/
@Override
public void exec() {
stacks = getJoint().stackGet();
}
}
| extensions/debuggee/src/org/exist/debuggee/dgbp/packets/StackGet.java | /*
* eXist Open Source Native XML Database
* Copyright (C) 2009 The eXist Project
* http://exist-db.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id:$
*/
package org.exist.debuggee.dgbp.packets;
import java.util.List;
import org.apache.mina.core.session.IoSession;
import org.exist.xquery.Expression;
/**
* @author <a href="mailto:[email protected]">Dmitriy Shabanov</a>
*
*/
public class StackGet extends Command {
private int stackDepth = 0;
private List<Expression> stacks;
public StackGet(IoSession session, String args) {
super(session, args);
}
protected void setArgument(String arg, String val) {
if (arg.equals("d"))
stackDepth = Integer.parseInt(val);
else
super.setArgument(arg, val);
}
public byte[] toBytes() {
String response = "" +
"<response " +
"command=\"stack_get\" " +
"transaction_id=\""+transactionID+"\">\n";
for (int depth = 0; depth < stacks.size(); depth++) {
response += stackToString(depth);
}
response += "</response>";
return response.getBytes();
}
private String stackToString(int stackDepth) {
if (stacks == null || stacks.size() == 0)
return "";
Expression expr = stacks.get(stackDepth);
return "<stack level=\""+String.valueOf(stackDepth+1)+"\" " +
"lineno=\""+expr.getLine()+"\" " +
"type=\"file\" " +
"filename=\""+getFileuri(expr.getSource())+"\" />";
// +
// "where=\"\" " +
// "cmdbegin=\""+expr.getLine()+":"+expr.getColumn()+"\" " +
// "cmdend=\""+(expr.getLine())+":"+(expr.getColumn()+1)+"\"/>";
}
/* (non-Javadoc)
* @see org.exist.debuggee.dgbp.packets.Command#exec()
*/
@Override
public void exec() {
stacks = getJoint().stackGet();
}
}
| [ignore] The current context is stack depth of zero, the 'oldest' context is the highest numbered context.
svn path=/trunk/eXist/; revision=10162
| extensions/debuggee/src/org/exist/debuggee/dgbp/packets/StackGet.java | [ignore] The current context is stack depth of zero, the 'oldest' context is the highest numbered context. |
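The commit message above pins down the numbering scheme that the updated toBytes()/stackToString() pair implements: the current context is reported as level 0 and the oldest enclosing context gets the highest level, via level = stacks.size() - index - 1. A minimal, self-contained sketch of that arithmetic (the frame names are invented; this is not code from the eXist tree):

// Illustrative only: mirrors the level numbering of the updated StackGet.
public class StackLevelSketch {
    public static void main(String[] args) {
        String[] stacks = { "outer-module", "calling-function", "current-expression" };
        // Walk the list the same way the new toBytes() does: last element first.
        for (int index = stacks.length - 1; index >= 0; index--) {
            int level = stacks.length - index - 1;
            System.out.println("level " + level + " -> " + stacks[index]);
        }
        // level 0 -> current-expression   (stack depth of zero = current context)
        // level 1 -> calling-function
        // level 2 -> outer-module         (highest numbered = oldest context)
    }
}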
|
Java | apache-2.0 | 1958cd7b8c462ad9dd261e7e3e30115233d7e102 | 0 | isandlaTech/cohorte-remote-services,ahmadshahwan/cohorte-remote-services | /**
*
*/
package org.cohorte.ecf.provider.jabsorb.client;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import org.cohorte.ecf.provider.jabsorb.Activator;
import org.cohorte.ecf.provider.jabsorb.JabsorbConstants;
import org.cohorte.ecf.provider.jabsorb.Utilities;
import org.cohorte.remote.utilities.BundlesClassLoader;
import org.eclipse.ecf.core.util.ECFException;
import org.eclipse.ecf.remoteservice.IRemoteCall;
import org.eclipse.ecf.remoteservice.client.AbstractClientContainer;
import org.eclipse.ecf.remoteservice.client.AbstractClientService;
import org.eclipse.ecf.remoteservice.client.IRemoteCallable;
import org.eclipse.ecf.remoteservice.client.RemoteServiceClientRegistration;
import org.jabsorb.ng.client.Client;
import org.jabsorb.ng.client.ISession;
import org.jabsorb.ng.client.TransportRegistry;
/**
* Jabsorb remote service client
*
* @author Thomas Calmant
*/
public class JabsorbClientService extends AbstractClientService {
/** The Jabsorb client */
private final Client pClient;
/** Service interfaces */
private final List<Class<?>> pInterfaces = new LinkedList<Class<?>>();
/** A classloader that walks through bundles */
private final ClassLoader pLoader;
/** The service proxy */
private final Object pProxy;
/**
* Sets up the client
*
* @param aContainer
* @param aRegistration
* @throws Exception
* Error preparing the proxy
*/
public JabsorbClientService(final AbstractClientContainer aContainer,
final RemoteServiceClientRegistration aRegistration)
throws ECFException {
super(aContainer, aRegistration);
// Setup the class loader
pLoader = new BundlesClassLoader(Activator.getContext());
// Setup the client
pClient = setupClient();
// Setup the proxy
pProxy = createProxy();
}
/**
* Creates a proxy for this service
*
* @return A proxy object
* @throws ECFException
* Error generating the endpoint name, or no service interface
* found
*/
private Object createProxy() throws ECFException {
// Load service classes
pInterfaces.clear();
for (final String className : registration.getClazzes()) {
try {
pInterfaces.add(pLoader.loadClass(className));
} catch (final ClassNotFoundException ex) {
// Ignore unknown class
System.err.println("Class not loaded: " + className);
}
}
        // If no class has been loaded, raise an error
if (pInterfaces.isEmpty()) {
throw new ECFException("No class found in: "
+ Arrays.toString(registration.getClazzes()));
}
// Create the proxy
return pClient.openProxy(Utilities.getEndpointName(registration),
pLoader, pInterfaces.toArray(new Class<?>[0]));
}
/**
* Clean up
*
* @see org.eclipse.ecf.remoteservice.AbstractRemoteService#dispose()
*/
@Override
public void dispose() {
// Close the proxy
pClient.closeProxy(pProxy);
// Clean up the list of classes
pInterfaces.clear();
super.dispose();
}
/**
* Looks for the first method in the service interfaces that has the same
* name and number of arguments.
*
     * @param aFullMethodName
     *            A method name, which may be prefixed with the fully qualified class name
* @return A method object or null
*/
private Method getMethod(final String aFullMethodName, final int aNbArgs) {
// Extracted method name
String methodName;
// List of classes where to find the method
final List<Class<?>> classes = new LinkedList<Class<?>>();
// Separate class and method names
final int methodIndex = aFullMethodName.lastIndexOf('.');
if (methodIndex == -1) {
// Not found, got the method name only
methodName = aFullMethodName;
// Look into all classes
classes.addAll(pInterfaces);
} else {
// Separate names
final String className = aFullMethodName.substring(0, methodIndex);
methodName = aFullMethodName.substring(methodIndex + 1);
// Find the valid class
for (final Class<?> clazz : pInterfaces) {
if (clazz.getName().equals(className)) {
// Look into only one class
classes.add(clazz);
break;
}
}
}
        // Look for the method object in the selected classes
        for (final Class<?> clazz : classes) {
final Method[] methods = clazz.getMethods();
for (final Method method : methods) {
// Test method name and number of arguments
if (Modifier.isPublic(method.getModifiers())
&& method.getName().equals(methodName)
&& method.getParameterTypes().length == aNbArgs) {
// Found a match
return method;
}
}
}
// Method not found
return null;
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.ecf.remoteservice.client.AbstractClientService#invokeRemoteCall
* (org.eclipse.ecf.remoteservice.IRemoteCall,
* org.eclipse.ecf.remoteservice.client.IRemoteCallable)
*/
@Override
protected Object invokeRemoteCall(final IRemoteCall aCall,
final IRemoteCallable aCallable) throws ECFException {
// Normalize parameters
Object[] parameters = aCall.getParameters();
if (parameters == null) {
parameters = new Object[0];
}
// Look for the method
final Method method = getMethod(aCall.getMethod(), parameters.length);
if (method == null) {
throw new ECFException("Can't find a method called "
+ aCall.getMethod() + " with " + parameters.length
+ " arguments");
}
try {
// Call the method
            return pClient.invoke(pProxy, method, parameters);
} catch (final Throwable ex) {
// Encapsulate the exception
throw new ECFException("Error calling remote method: "
+ ex.getMessage(), ex);
}
}
/**
* Sets up the client according to the registration
*/
private Client setupClient() {
// Get the accesses
final String[] accesses = (String[]) registration
.getProperty(JabsorbConstants.PROP_HTTP_ACCESSES);
// Get the first one
// FIXME: get the first **valid** one...
final String uri = accesses[0];
System.out.println("Accesses: " + Arrays.toString(accesses));
System.out.println("Chosen..: " + uri);
// Prepare the session
final ISession session = TransportRegistry.i().createSession(uri);
// Set up the client
return new Client(session, pLoader);
}
}
| org.cohorte.ecf.provider.jabsorb/src/org/cohorte/ecf/provider/jabsorb/client/JabsorbClientService.java | /**
*
*/
package org.cohorte.ecf.provider.jabsorb.client;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import org.cohorte.ecf.provider.jabsorb.Activator;
import org.cohorte.ecf.provider.jabsorb.JabsorbConstants;
import org.cohorte.ecf.provider.jabsorb.Utilities;
import org.cohorte.remote.utilities.BundlesClassLoader;
import org.eclipse.ecf.core.util.ECFException;
import org.eclipse.ecf.remoteservice.IRemoteCall;
import org.eclipse.ecf.remoteservice.client.AbstractClientContainer;
import org.eclipse.ecf.remoteservice.client.AbstractClientService;
import org.eclipse.ecf.remoteservice.client.IRemoteCallable;
import org.eclipse.ecf.remoteservice.client.RemoteServiceClientRegistration;
import org.jabsorb.ng.client.Client;
import org.jabsorb.ng.client.ISession;
import org.jabsorb.ng.client.TransportRegistry;
/**
* Jabsorb remote service client
*
* @author Thomas Calmant
*/
public class JabsorbClientService extends AbstractClientService {
/** The Jabsorb client */
private final Client pClient;
/** Service interfaces */
private final List<Class<?>> pInterfaces = new LinkedList<Class<?>>();
/** A classloader that walks through bundles */
private final ClassLoader pLoader;
/** The service proxy */
private final Object pProxy;
/**
* Sets up the client
*
* @param aContainer
* @param aRegistration
* @throws Exception
* Error preparing the proxy
*/
public JabsorbClientService(final AbstractClientContainer aContainer,
final RemoteServiceClientRegistration aRegistration)
throws ECFException {
super(aContainer, aRegistration);
// Setup the class loader
pLoader = new BundlesClassLoader(Activator.getContext());
// Setup the client
pClient = setupClient();
// Setup the proxy
pProxy = createProxy();
}
/**
* Creates a proxy for this service
*
* @return A proxy object
* @throws ECFException
* Error generating the endpoint name, or no service interface
* found
*/
private Object createProxy() throws ECFException {
// Load service classes
pInterfaces.clear();
for (final String className : registration.getClazzes()) {
try {
pInterfaces.add(pLoader.loadClass(className));
} catch (final ClassNotFoundException ex) {
// Ignore unknown class
System.err.println("Class not loaded: " + className);
}
}
        // If no class has been loaded, raise an error
if (pInterfaces.isEmpty()) {
throw new ECFException("No class found in: "
+ Arrays.toString(registration.getClazzes()));
}
// Create the proxy
return pClient.openProxy(Utilities.getEndpointName(registration),
pLoader, pInterfaces.toArray(new Class<?>[0]));
}
/**
* Clean up
*
* @see org.eclipse.ecf.remoteservice.AbstractRemoteService#dispose()
*/
@Override
public void dispose() {
// Close the proxy
pClient.closeProxy(pProxy);
// Clean up the list of classes
pInterfaces.clear();
super.dispose();
}
/**
* Looks for the first method in the service interfaces that has the same
* name and number of arguments.
*
* @param aMethodName
* A method name
* @return A method object or null
*/
private Method getMethod(final String aMethodName, final int aNbArgs) {
for (final Class<?> clazz : pInterfaces) {
final Method[] methods = clazz.getMethods();
for (final Method method : methods) {
// Test method name and number of arguments
// Interface methods are public, so no need to check for them
if (method.getName().equals(aMethodName)
&& method.getParameterTypes().length == aNbArgs) {
// Found a match
return method;
}
}
}
// Method not found
return null;
}
/*
* (non-Javadoc)
*
* @see
* org.eclipse.ecf.remoteservice.client.AbstractClientService#invokeRemoteCall
* (org.eclipse.ecf.remoteservice.IRemoteCall,
* org.eclipse.ecf.remoteservice.client.IRemoteCallable)
*/
@Override
protected Object invokeRemoteCall(final IRemoteCall aCall,
final IRemoteCallable aCallable) throws ECFException {
// Look for the method
final Method method = getMethod(aCall.getMethod(),
aCall.getParameters().length);
if (method == null) {
throw new ECFException("Can't find a method called "
+ aCall.getMethod() + " with "
+ aCall.getParameters().length + " arguments");
}
try {
// Call the method
return pClient.invoke(pProxy, method, aCall.getParameters());
} catch (final Throwable ex) {
// Encapsulate the exception
throw new ECFException("Error calling remote method: "
+ ex.getMessage(), ex);
}
}
/**
* Sets up the client according to the registration
*/
private Client setupClient() {
// Get the accesses
final String[] accesses = (String[]) registration
.getProperty(JabsorbConstants.PROP_HTTP_ACCESSES);
// Get the first one
// FIXME: get the first **valid** one...
final String uri = accesses[0];
System.out.println("Accesses: " + Arrays.toString(accesses));
System.out.println("Chosen..: " + uri);
// Prepare the session
final ISession session = TransportRegistry.i().createSession(uri);
// Set up the client
return new Client(session, pLoader);
}
}
| Corrected Method look up in the client service | org.cohorte.ecf.provider.jabsorb/src/org/cohorte/ecf/provider/jabsorb/client/JabsorbClientService.java | Corrected Method look up in the client service |
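The corrected lookup above accepts a bare method name as well as one prefixed with the declaring interface, splitting at the last '.' and then matching on name and argument count. A small sketch of that splitting rule; the qualified name org.example.IEcho.echo is a made-up example, not an identifier from the project:

// Illustrative only: the split-at-last-dot rule used by the corrected getMethod().
public class MethodNameSplitSketch {
    public static void main(String[] args) {
        String[] samples = { "echo", "org.example.IEcho.echo" };
        for (String fullName : samples) {
            int dot = fullName.lastIndexOf('.');
            String className = (dot == -1) ? "<any interface>" : fullName.substring(0, dot);
            String methodName = (dot == -1) ? fullName : fullName.substring(dot + 1);
            System.out.println(fullName + " -> class=" + className + ", method=" + methodName);
        }
    }
}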
|
Java | apache-2.0 | 57c32b20e50d038f72ad534d3d4138e56b927e5c | 0 | billy1380/blogwt,billy1380/blogwt,billy1380/blogwt | //
// RoleHelper.java
// blogwt
//
// Created by William Shakour (billy1380) on 11 May 2015.
// Copyright © 2015 WillShex Limited. All rights reserved.
//
package com.willshex.blogwt.shared.helper;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import com.willshex.blogwt.shared.api.datatype.Role;
/**
* @author William Shakour (billy1380)
*
*/
public class RoleHelper {
public static final String ADMIN = "ADM";
public static final String ADMIN_NAME = "System Administrator";
public static final String ADMIN_DESCRIPTION = "allows users to perform any action (implicitly contains all permissions)";
public static Role createAdmin () {
return create(ADMIN);
}
public static Role createFullAdmin () {
return createFull(ADMIN, ADMIN_NAME, ADMIN_DESCRIPTION);
}
public static Map<String, Role> toLookup (Collection<Role> roles) {
Map<String, Role> lookup = new HashMap<String, Role>();
for (Role role : roles) {
lookup.put(role.code, role);
}
return lookup;
}
public static Role createFull (String code, String name, String description) {
return new Role().code(code).name(name).description(description);
}
public static Role create (String code) {
return new Role().code(code);
}
}
| src/main/java/com/willshex/blogwt/shared/helper/RoleHelper.java | //
// RoleHelper.java
// blogwt
//
// Created by William Shakour (billy1380) on 11 May 2015.
// Copyright © 2015 WillShex Limited. All rights reserved.
//
package com.willshex.blogwt.shared.helper;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import com.willshex.blogwt.shared.api.datatype.Role;
/**
* @author William Shakour (billy1380)
*
*/
public class RoleHelper {
public static final String ADMIN = "ADM";
public static final String ADMIN_NAME = "System Administrator";
public static final String ADMIN_DESCRIPTION = "allows users to perform any action (implicitly contains all permissions)";
public static Role createAdmin () {
return create(ADMIN);
}
public static Map<String, Role> toLookup (Collection<Role> roles) {
Map<String, Role> lookup = new HashMap<String, Role>();
for (Role role : roles) {
lookup.put(role.code, role);
}
return lookup;
}
public static Role createFull (String code, String name, String description) {
return new Role().code(code).name(name).description(description);
}
public static Role create (String code) {
return new Role().code(code);
}
}
| added create full admin method to role helper | src/main/java/com/willshex/blogwt/shared/helper/RoleHelper.java | added create full admin method to role helper |
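A short usage sketch contrasting the pre-existing createAdmin() with the createFullAdmin() added by this commit; it assumes the blogwt Role datatype shown above is on the classpath and is not part of the commit itself:

// Hypothetical usage example, not code from the blogwt repository.
import com.willshex.blogwt.shared.api.datatype.Role;
import com.willshex.blogwt.shared.helper.RoleHelper;

public class RoleHelperUsageSketch {
    public static void main(String[] args) {
        Role bare = RoleHelper.createAdmin();      // only the code "ADM" is populated
        Role full = RoleHelper.createFullAdmin();  // code, name and description are populated
        System.out.println(bare.code);             // Role.code is a public field (see toLookup above)
        System.out.println(full.code);
    }
}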
|
Java | apache-2.0 | ce428737f2ebecd4fb8f73d0dc3d2d9b42fd8e97 | 0 | exKAZUu/SimulatedMapReduce | package jp.ac.nii.exercise6;
import java.nio.file.Paths;
import java.util.Comparator;
import jp.ac.nii.mapreduceframework.FileInputFormat;
import jp.ac.nii.mapreduceframework.FileOutputFormat;
import jp.ac.nii.mapreduceframework.HashPartitioner;
import jp.ac.nii.mapreduceframework.Job;
import jp.ac.nii.mapreduceframework.NullWritable;
/**
 * The Job definition for the job that computes the relatedness score given by the formula below.
 * relatedness = total number of (product X, product Y) pairs / total number of pairs that contain product X
 * TODO: This file is incomplete!
*/
public class RelativityCalculationJob {
/**
     * This method is already complete.
*/
public static Job<Long, String, String, String, NullWritable, String> create() {
Job<Long, String, String, String, NullWritable, String> job = Job.getInstance();
job.setMapperClass(RelativityCaclulationMapper.class);
job.setReducerClass(RelativityCalculationReducer.class);
job.setInputFormatClass(FileInputFormat.class);
job.setOutputFormatClass(FileOutputFormat.class);
FileInputFormat.addInputPath(job, Paths.get(FileNameConstants.DENOMINATION));
FileInputFormat.addInputPath(job, Paths.get(FileNameConstants.NUMERATOR));
FileOutputFormat.setOutputPath(job, Paths.get(FileNameConstants.RELATED_GOODS));
        // TODO: Fix the RelativityCalculationSortComparator, RelativityCalculationPartitioner,
        // and RelativityCalculationGroupComparator classes much further below!
        // Hint: read the RelativityCaclulationMapper class carefully
        // How keys should be ordered (controls how keys are sorted before they are assigned to reduce tasks)
job.setSortComparatorClass(RelativityCalculationSortComparator.class);
        // Which reduce task processes a key (and its corresponding values) (controls reduce-task assignment)
job.setPartitionerClass(RelativityCalculationPartitioner.class);
        // Which keys are regarded as identical and aggregated into the Reducer's value list (controls the unit of reduce processing)
job.setGroupingComparatorClass(RelativityCalculationGroupComparator.class);
job.setNumReduceTasks(10);
return job;
}
public static String removeSharpD(String key) {
String keyStr = key.toString();
if (keyStr.endsWith("#d")) {
return keyStr.substring(0, keyStr.length() - 2);
}
return key;
}
/**
     * Because denominator data and numerator data arrive interleaved, as in:
     * <bean-paste doughnut, tokoroten,1200 (note: numerator data)>, <bean-paste doughnut#d, 5400 (note: denominator data)>, <bean-paste doughnut, fresh cream puff,2000 (note: numerator data)>
     * control the hash computation so that "bean-paste doughnut" and "bean-paste doughnut#d" are regarded as the same key and processed by the same reduce task.
*/
public static class RelativityCalculationPartitioner extends HashPartitioner<String, String> {
@Override
public int getPartition(String key, String value, int numReduceTasks) {
            // TODO: make use of the removeSharp() and super.getPartition() methods
            return 0; // Note: return 0; is incorrect
}
}
/**
     * Because denominator data and numerator data arrive interleaved, as in:
     * <bean-paste doughnut, tokoroten,1200 (note: numerator data)>, <bean-paste doughnut#d, 5400 (note: denominator data)>, <bean-paste doughnut, fresh cream puff,2000 (note: numerator data)>
     * control the key comparison so that "bean-paste doughnut" and "bean-paste doughnut#d" are regarded as the same key and their values are gathered into one value list.
*/
public static class RelativityCalculationGroupComparator implements Comparator<String> {
@Override
public int compare(String a, String b) {
            // TODO: make use of the removeSharp() and String.compareTo() methods
            return 0; // Note: return 0; is incorrect
}
}
/**
     * Because denominator data and numerator data arrive interleaved, as in:
     * <bean-paste doughnut, tokoroten,1200 (note: numerator data)>, <bean-paste doughnut#d, 5400 (note: denominator data)>, <bean-paste doughnut, fresh cream puff,2000 (note: numerator data)>
     * control the comparison used when sorting keys so that the denominator record comes first, as shown below:
     * <bean-paste doughnut#d, 5400 (note: denominator data)>, <bean-paste doughnut, tokoroten,1200 (note: numerator data)>, <bean-paste doughnut, fresh cream puff,2000 (note: numerator data)>
*/
public static class RelativityCalculationSortComparator implements Comparator<String> {
@Override
public int compare(String a, String b) {
            // TODO: make use of the String.compareTo() method
            return 0; // Note: return 0; is incorrect
}
}
}
| src/main/java/jp/ac/nii/exercise6/RelativityCalculationJob.java | package jp.ac.nii.exercise6;
import java.nio.file.Paths;
import java.util.Comparator;
import jp.ac.nii.mapreduceframework.FileInputFormat;
import jp.ac.nii.mapreduceframework.FileOutputFormat;
import jp.ac.nii.mapreduceframework.HashPartitioner;
import jp.ac.nii.mapreduceframework.Job;
import jp.ac.nii.mapreduceframework.NullWritable;
/**
 * The Job definition for the job that computes the relatedness score given by the formula below.
 * relatedness = total number of (product X, product Y) pairs / total number of pairs that contain product X
 * TODO: This file is incomplete!
*/
public class RelativityCalculationJob {
public static Job<Long, String, String, String, NullWritable, String> create() {
Job<Long, String, String, String, NullWritable, String> job = Job.getInstance();
job.setMapperClass(RelativityCaclulationMapper.class);
job.setReducerClass(RelativityCalculationReducer.class);
job.setInputFormatClass(FileInputFormat.class);
job.setOutputFormatClass(FileOutputFormat.class);
FileInputFormat.addInputPath(job, Paths.get(FileNameConstants.DENOMINATION));
FileInputFormat.addInputPath(job, Paths.get(FileNameConstants.NUMERATOR));
FileOutputFormat.setOutputPath(job, Paths.get(FileNameConstants.RELATED_GOODS));
        // TODO: Fix the RelativityCalculationSortComparator, RelativityCalculationPartitioner,
        // and RelativityCalculationGroupComparator classes!
        // Hint: read the RelativityCaclulationMapper class carefully
        // How keys should be ordered (controls how keys are sorted before they are assigned to reduce tasks)
job.setSortComparatorClass(RelativityCalculationSortComparator.class);
        // Which reduce task processes a key (and its corresponding values) (controls reduce-task assignment)
job.setPartitionerClass(RelativityCalculationPartitioner.class);
        // Which keys are regarded as identical and aggregated into the Reducer's value list (controls the unit of reduce processing)
job.setGroupingComparatorClass(RelativityCalculationGroupComparator.class);
job.setNumReduceTasks(10);
return job;
}
public static String removeSharpD(String key) {
String keyStr = key.toString();
if (keyStr.endsWith("#d")) {
return keyStr.substring(0, keyStr.length() - 2);
}
return key;
}
/**
     * Because denominator data and numerator data arrive interleaved, as in:
     * <bean-paste doughnut, tokoroten,1200 (note: numerator data)>, <bean-paste doughnut#d, 5400 (note: denominator data)>, <bean-paste doughnut, fresh cream puff,2000 (note: numerator data)>
     * control the hash computation so that "bean-paste doughnut" and "bean-paste doughnut#d" are regarded as the same key and processed by the same reduce task.
*/
public static class RelativityCalculationPartitioner extends HashPartitioner<String, String> {
@Override
public int getPartition(String key, String value, int numReduceTasks) {
            // TODO: make use of the removeSharp() and super.getPartition() methods
            return 0; // Note: return 0; is incorrect
}
}
/**
     * Because denominator data and numerator data arrive interleaved, as in:
     * <bean-paste doughnut, tokoroten,1200 (note: numerator data)>, <bean-paste doughnut#d, 5400 (note: denominator data)>, <bean-paste doughnut, fresh cream puff,2000 (note: numerator data)>
     * control the key comparison so that "bean-paste doughnut" and "bean-paste doughnut#d" are regarded as the same key and their values are gathered into one value list.
*/
public static class RelativityCalculationGroupComparator implements Comparator<String> {
@Override
public int compare(String a, String b) {
            // TODO: make use of the removeSharp() and String.compareTo() methods
            return 0; // Note: return 0; is incorrect
}
}
/**
     * Because denominator data and numerator data arrive interleaved, as in:
     * <bean-paste doughnut, tokoroten,1200 (note: numerator data)>, <bean-paste doughnut#d, 5400 (note: denominator data)>, <bean-paste doughnut, fresh cream puff,2000 (note: numerator data)>
     * control the comparison used when sorting keys so that the denominator record comes first, as shown below:
     * <bean-paste doughnut#d, 5400 (note: denominator data)>, <bean-paste doughnut, tokoroten,1200 (note: numerator data)>, <bean-paste doughnut, fresh cream puff,2000 (note: numerator data)>
*/
public static class RelativityCalculationSortComparator implements Comparator<String> {
@Override
public int compare(String a, String b) {
            // TODO: make use of the String.compareTo() method
            return 0; // Note: return 0; is incorrect
}
}
}
| Enhance comments.
| src/main/java/jp/ac/nii/exercise6/RelativityCalculationJob.java | Enhance comments. |
|
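The Javadoc in the exercise above describes a secondary-sort pattern: the denominator for product X is keyed "X#d", and the partitioner, grouping comparator and sort comparator must be arranged so that a key and its "#d" twin reach the same reduce call with the denominator first. The sketch below shows one consistent way to order such keys outside the exercise framework; it is an illustration under those assumptions, not the exercise's intended answer:

// Illustrative only: orders "X#d" ahead of "X" while keeping each product's keys together.
import java.util.Arrays;
import java.util.Comparator;

public class SecondarySortSketch {
    static String removeSharpD(String key) {
        return key.endsWith("#d") ? key.substring(0, key.length() - 2) : key;
    }
    // Sort by the base product name, and within one product put the "#d" key first.
    static final Comparator<String> SORT = (a, b) -> {
        int byProduct = removeSharpD(a).compareTo(removeSharpD(b));
        if (byProduct != 0) {
            return byProduct;
        }
        return Boolean.compare(b.endsWith("#d"), a.endsWith("#d"));
    };
    public static void main(String[] args) {
        String[] keys = { "doughnut", "doughnut#d", "tokoroten", "tokoroten#d" };
        Arrays.sort(keys, SORT);
        System.out.println(Arrays.toString(keys));
        // [doughnut#d, doughnut, tokoroten#d, tokoroten]
        // Partitioning and grouping would likewise compare removeSharpD(key), so that a key
        // and its "#d" twin land in the same reduce task and the same reduce() call.
    }
}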
Java | apache-2.0 | a344b6211894e488b5a99a263fac184385af1c45 | 0 | wso2-extensions/siddhi-io-kafka,ramindu90/siddhi-io-kafka | /*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.extension.output.transport.tcp;
import junit.framework.Assert;
import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.wso2.siddhi.core.ExecutionPlanRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.exception.ExecutionPlanCreationException;
import org.wso2.siddhi.core.stream.input.InputHandler;
import org.wso2.siddhi.query.api.definition.Attribute;
import org.wso2.siddhi.query.api.definition.StreamDefinition;
import org.wso2.siddhi.query.api.exception.ExecutionPlanValidationException;
import org.wso2.siddhi.tcp.transport.TCPNettyServer;
import org.wso2.siddhi.tcp.transport.callback.StreamListener;
import org.wso2.siddhi.tcp.transport.config.ServerConfig;
import java.util.ArrayList;
public class TCPOutputTransportTestCase {
static final Logger log = Logger.getLogger(TCPOutputTransportTestCase.class);
private volatile int count;
private volatile int count1;
private volatile boolean eventArrived;
@Before
public void init() {
count = 0;
count1 = 0;
eventArrived = false;
}
@Test
public void testTcpOutputTransport1() throws InterruptedException {
log.info("tcpInputTransport TestCase 1");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test(expected = ExecutionPlanValidationException.class)
public void testTcpOutputTransport2() throws InterruptedException {
log.info("tcpInputTransport TestCase 2");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
Thread.sleep(300);
executionPlanRuntime.shutdown();
}
@Test
public void testTcpOutputTransport3() throws InterruptedException {
log.info("tcpInputTransport TestCase 3");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', host='127.0.0.1', port='9766', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(9766);
serverConfig.setHost("127.0.0.1");
tcpNettyServer.bootServer(serverConfig);
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport4() throws InterruptedException {
log.info("tcpInputTransport TestCase 4");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', port='9766', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(9766);
serverConfig.setHost("127.0.0.1");
tcpNettyServer.bootServer(serverConfig);
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport5() throws InterruptedException {
log.info("tcpInputTransport TestCase 5");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', port='9766', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(9766);
serverConfig.setHost("127.0.0.1");
tcpNettyServer.bootServer(serverConfig);
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test(expected = ExecutionPlanCreationException.class)
public void testTcpOutputTransport6() throws InterruptedException {
log.info("tcpInputTransport TestCase 6");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', host='127.0.0.1', port='9766', @map(type='text')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = null;
try {
executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
} finally {
if (executionPlanRuntime != null) {
executionPlanRuntime.shutdown();
}
}
}
@Test
public void testTcpOutputTransport7() throws InterruptedException {
log.info("tcpInputTransport TestCase 7");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo') " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport8() throws InterruptedException {
log.info("tcpInputTransport TestCase 8");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='{{a}}') " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("foo", event.getData(0));
break;
case 2:
Assert.assertEquals("foo", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"bar", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"bar", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"foo", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"foo", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[4]));
Thread.sleep(300);
Assert.assertEquals(2, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport9() throws InterruptedException {
log.info("tcpInputTransport TestCase 9");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='bar', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertFalse(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Ignore
@Test
//todo validate log
public void testTcpOutputTransport10() throws InterruptedException {
log.info("tcpInputTransport TestCase 10");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='bar', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertFalse(eventArrived);
executionPlanRuntime.shutdown();
}
@Test(expected = ExecutionPlanCreationException.class)
public void testTcpOutputTransport11() throws InterruptedException {
log.info("tcpInputTransport TestCase 11");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', host='127.0.0.1', port='{{d}}') " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = null;
try {
executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
} finally {
if (executionPlanRuntime != null) {
executionPlanRuntime.shutdown();
}
}
}
@Test(expected = ExecutionPlanCreationException.class)
public void testTcpOutputTransport12() throws InterruptedException {
log.info("tcpInputTransport TestCase 12");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', host='{{a}}') " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = null;
try {
executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
} finally {
if (executionPlanRuntime != null) {
executionPlanRuntime.shutdown();
}
}
}
@Test
public void testTcpOutputTransport13() throws InterruptedException {
log.info("tcpInputTransport TestCase 13");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo') " +
"define stream outputStream1 (a string, b int, c float, d long, e double, f bool);" +
"@sink(type='tcp', context='foo') " +
"define stream outputStream2 (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"" +
"from inputStream " +
"select * " +
"insert into outputStream1; " +
"" +
"from inputStream " +
"select * " +
"insert into outputStream2; " +
"");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
/*
commenting this out since we cannot guarantee an event order here
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
case 4:
Assert.assertEquals("test", event.getData(0));
break;
case 5:
Assert.assertEquals("test1", event.getData(0));
break;
case 6:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}*/
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(3000);
Assert.assertEquals(6, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport14() throws InterruptedException {
log.info("tcpInputTransport TestCase 14");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo1', port='9854') " +
"@sink(type='tcp', context='foo2') " +
"define stream outputStream(a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"" +
"from inputStream " +
"select * " +
"insert into outputStream; " +
"");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition1 = StreamDefinition.id("foo1").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
StreamDefinition streamDefinition2 = StreamDefinition.id("foo2").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer1 = new TCPNettyServer();
TCPNettyServer tcpNettyServer2 = new TCPNettyServer();
tcpNettyServer1.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition1;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer2.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition2;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count1++;
switch (count1) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(9854);
tcpNettyServer1.bootServer(serverConfig);
tcpNettyServer2.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(3000);
Assert.assertEquals(3, count);
Assert.assertEquals(3, count1);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer1.shutdownGracefully();
tcpNettyServer2.shutdownGracefully();
}
@Test
public void testTcpOutputTransport15() throws InterruptedException {
log.info("tcpInputTransport TestCase 15");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"@plan:name('foo') " +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo/inputStream1') " +
"define stream outputStream(a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"" +
"from inputStream " +
"select * " +
"insert into outputStream;" +
" " +
"");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition1 = StreamDefinition.id("foo/inputStream1").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer1 = new TCPNettyServer();
tcpNettyServer1.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition1;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer1.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(3000);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer1.shutdownGracefully();
}
@Test
public void testTcpOutputTransport16() throws InterruptedException {
log.info("tcpInputTransport TestCase 16");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
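        // Unlike the earlier tests, the runtime (and its TCP sink) is started before the server
        // is booted; presumably this exercises the sink's behavior when the endpoint only becomes
        // available later, since the events sent afterwards are still expected to arrive.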
executionPlanRuntime.start();
Thread.sleep(2000);
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
}
| output-transports/tcp-output-transport/src/test/java/org/wso2/siddhi/extension/output/transport/tcp/TCPOutputTransportTestCase.java | /*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.extension.output.transport.tcp;
import junit.framework.Assert;
import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;
import org.wso2.siddhi.core.ExecutionPlanRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.exception.ExecutionPlanCreationException;
import org.wso2.siddhi.core.stream.input.InputHandler;
import org.wso2.siddhi.query.api.definition.Attribute;
import org.wso2.siddhi.query.api.definition.StreamDefinition;
import org.wso2.siddhi.query.api.exception.ExecutionPlanValidationException;
import org.wso2.siddhi.tcp.transport.TCPNettyServer;
import org.wso2.siddhi.tcp.transport.callback.StreamListener;
import org.wso2.siddhi.tcp.transport.config.ServerConfig;
import java.util.ArrayList;
public class TCPOutputTransportTestCase {
static final Logger log = Logger.getLogger(TCPOutputTransportTestCase.class);
private volatile int count;
private volatile int count1;
private volatile boolean eventArrived;
@Before
public void init() {
count = 0;
count1 = 0;
eventArrived = false;
}
@Test
public void testTcpOutputTransport1() throws InterruptedException {
log.info("tcpInputTransport TestCase 1");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test(expected = ExecutionPlanValidationException.class)
public void testTcpOutputTransport2() throws InterruptedException {
log.info("tcpInputTransport TestCase 2");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
Thread.sleep(300);
executionPlanRuntime.shutdown();
}
@Test
public void testTcpOutputTransport3() throws InterruptedException {
log.info("tcpInputTransport TestCase 3");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', host='127.0.0.1', port='9766', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(9766);
serverConfig.setHost("127.0.0.1");
tcpNettyServer.bootServer(serverConfig);
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport4() throws InterruptedException {
log.info("tcpInputTransport TestCase 4");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', port='9766', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(9766);
serverConfig.setHost("127.0.0.1");
tcpNettyServer.bootServer(serverConfig);
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport5() throws InterruptedException {
log.info("tcpInputTransport TestCase 5");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', port='9766', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(9766);
serverConfig.setHost("127.0.0.1");
tcpNettyServer.bootServer(serverConfig);
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test(expected = ExecutionPlanCreationException.class)
public void testTcpOutputTransport6() throws InterruptedException {
log.info("tcpInputTransport TestCase 6");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', host='127.0.0.1', port='9766', @map(type='text')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = null;
try {
executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
} finally {
if (executionPlanRuntime != null) {
executionPlanRuntime.shutdown();
}
}
}
@Test
public void testTcpOutputTransport7() throws InterruptedException {
log.info("tcpInputTransport TestCase 7");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo') " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport8() throws InterruptedException {
log.info("tcpInputTransport TestCase 8");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='{{a}}') " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("foo", event.getData(0));
break;
case 2:
Assert.assertEquals("foo", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"bar", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"bar", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"foo", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"foo", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[4]));
Thread.sleep(300);
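        // The sink context is the dynamic value '{{a}}', and the server listener is registered
        // for stream id 'foo', so only the two events whose first attribute is "foo" should arrive.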
Assert.assertEquals(2, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport9() throws InterruptedException {
log.info("tcpInputTransport TestCase 9");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='bar', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
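        // The sink publishes to context 'bar' while the server listener is registered for
        // stream id 'foo', so no events should reach the listener.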
Assert.assertFalse(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport10() throws InterruptedException {
try {
log.info("tcpInputTransport TestCase 10");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='bar', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertFalse(eventArrived);
executionPlanRuntime.shutdown();
} catch (Throwable t) {
t.printStackTrace();
}
}
@Test(expected = ExecutionPlanCreationException.class)
public void testTcpOutputTransport11() throws InterruptedException {
log.info("tcpInputTransport TestCase 11");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', host='127.0.0.1', port='{{d}}') " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = null;
try {
executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
} finally {
if (executionPlanRuntime != null) {
executionPlanRuntime.shutdown();
}
}
}
@Test(expected = ExecutionPlanCreationException.class)
public void testTcpOutputTransport12() throws InterruptedException {
log.info("tcpInputTransport TestCase 12");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', host='{{a}}') " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = null;
try {
executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
} finally {
if (executionPlanRuntime != null) {
executionPlanRuntime.shutdown();
}
}
}
@Test
public void testTcpOutputTransport13() throws InterruptedException {
log.info("tcpInputTransport TestCase 13");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo') " +
"define stream outputStream1 (a string, b int, c float, d long, e double, f bool);" +
"@sink(type='tcp', context='foo') " +
"define stream outputStream2 (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"" +
"from inputStream " +
"select * " +
"insert into outputStream1; " +
"" +
"from inputStream " +
"select * " +
"insert into outputStream2; " +
"");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
/*
commenting this out since we cannot guarantee an event order here
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
case 4:
Assert.assertEquals("test", event.getData(0));
break;
case 5:
Assert.assertEquals("test1", event.getData(0));
break;
case 6:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}*/
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(3000);
Assert.assertEquals(6, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
@Test
public void testTcpOutputTransport14() throws InterruptedException {
log.info("tcpInputTransport TestCase 14");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo1', port='9854') " +
"@sink(type='tcp', context='foo2') " +
"define stream outputStream(a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"" +
"from inputStream " +
"select * " +
"insert into outputStream; " +
"");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition1 = StreamDefinition.id("foo1").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
StreamDefinition streamDefinition2 = StreamDefinition.id("foo2").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer1 = new TCPNettyServer();
TCPNettyServer tcpNettyServer2 = new TCPNettyServer();
tcpNettyServer1.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition1;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer2.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition2;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count1++;
switch (count1) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(9854);
tcpNettyServer1.bootServer(serverConfig);
tcpNettyServer2.bootServer(new ServerConfig());
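        // The output stream has two sinks: 'foo1' on the server listening at port 9854 and
        // 'foo2' on the server using the default config, so each listener should receive all three events.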
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(3000);
Assert.assertEquals(3, count);
Assert.assertEquals(3, count1);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer1.shutdownGracefully();
tcpNettyServer2.shutdownGracefully();
}
@Test
public void testTcpOutputTransport15() throws InterruptedException {
log.info("tcpInputTransport TestCase 15");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"@plan:name('foo') " +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo/inputStream1') " +
"define stream outputStream(a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"" +
"from inputStream " +
"select * " +
"insert into outputStream;" +
" " +
"");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition1 = StreamDefinition.id("foo/inputStream1").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer1 = new TCPNettyServer();
tcpNettyServer1.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition1;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
tcpNettyServer1.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
executionPlanRuntime.start();
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(3000);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer1.shutdownGracefully();
}
@Test
public void testTcpOutputTransport16() throws InterruptedException {
log.info("tcpInputTransport TestCase 16");
SiddhiManager siddhiManager = new SiddhiManager();
String inStreamDefinition = "" +
"define stream inputStream (a string, b int, c float, d long, e double, f bool); " +
"@sink(type='tcp', context='foo', @map(type='passThrough')) " +
"define stream outputStream (a string, b int, c float, d long, e double, f bool);";
String query = ("@info(name = 'query1') " +
"from inputStream " +
"select * " +
"insert into outputStream;");
ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition + query);
StreamDefinition streamDefinition = StreamDefinition.id("foo").attribute("a", Attribute.Type.STRING)
.attribute("b", Attribute.Type.INT).attribute("c", Attribute.Type.FLOAT).attribute("d", Attribute.Type.LONG)
.attribute("e", Attribute.Type.DOUBLE).attribute("f", Attribute.Type.BOOL);
TCPNettyServer tcpNettyServer = new TCPNettyServer();
tcpNettyServer.addStreamListener(new StreamListener() {
@Override
public StreamDefinition getStreamDefinition() {
return streamDefinition;
}
@Override
public void onEvent(Event event) {
System.out.println(event);
eventArrived = true;
count++;
switch (count) {
case 1:
Assert.assertEquals("test", event.getData(0));
break;
case 2:
Assert.assertEquals("test1", event.getData(0));
break;
case 3:
Assert.assertEquals("test2", event.getData(0));
break;
default:
org.junit.Assert.fail();
}
}
@Override
public void onEvents(Event[] events) {
for (Event event : events) {
onEvent(event);
}
}
});
executionPlanRuntime.start();
Thread.sleep(2000);
tcpNettyServer.bootServer(new ServerConfig());
InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
ArrayList<Event> arrayList = new ArrayList<Event>();
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test", 36, 3.0f, 380l, 23.0, true}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test1", 361, 31.0f, 3801l, 231.0, false}));
arrayList.add(new Event(System.currentTimeMillis(), new Object[]{"test2", 362, 32.0f, 3802l, 232.0, true}));
inputHandler.send(arrayList.toArray(new Event[3]));
Thread.sleep(300);
Assert.assertEquals(3, count);
Assert.assertTrue(eventArrived);
executionPlanRuntime.shutdown();
tcpNettyServer.shutdownGracefully();
}
}
| Ignoring test case that need log analysis
| output-transports/tcp-output-transport/src/test/java/org/wso2/siddhi/extension/output/transport/tcp/TCPOutputTransportTestCase.java | Ignoring test case that need log analysis |
|
Java | apache-2.0 | ba58c6c571b02028ab2de74787cc64a2d7d5cc11 | 0 | dickschoeller/gedbrowser,dickschoeller/gedbrowser,dickschoeller/gedbrowser,dickschoeller/gedbrowser,dickschoeller/gedbrowser,dickschoeller/gedbrowser | package org.schoellerfamily.gedbrowser.persistence.mongo.gedconvert;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.schoellerfamily.gedbrowser.datamodel.Attribute;
import org.schoellerfamily.gedbrowser.datamodel.Child;
import org.schoellerfamily.gedbrowser.datamodel.Date;
import org.schoellerfamily.gedbrowser.datamodel.FamC;
import org.schoellerfamily.gedbrowser.datamodel.FamS;
import org.schoellerfamily.gedbrowser.datamodel.Family;
import org.schoellerfamily.gedbrowser.datamodel.GedObject;
import org.schoellerfamily.gedbrowser.datamodel.Head;
import org.schoellerfamily.gedbrowser.datamodel.Husband;
import org.schoellerfamily.gedbrowser.datamodel.Multimedia;
import org.schoellerfamily.gedbrowser.datamodel.Name;
import org.schoellerfamily.gedbrowser.datamodel.Person;
import org.schoellerfamily.gedbrowser.datamodel.Place;
import org.schoellerfamily.gedbrowser.datamodel.Root;
import org.schoellerfamily.gedbrowser.datamodel.Source;
import org.schoellerfamily.gedbrowser.datamodel.SourceLink;
import org.schoellerfamily.gedbrowser.datamodel.Submittor;
import org.schoellerfamily.gedbrowser.datamodel.SubmittorLink;
import org.schoellerfamily.gedbrowser.datamodel.Trailer;
import org.schoellerfamily.gedbrowser.datamodel.Wife;
import org.schoellerfamily.gedbrowser.persistence.GedDocumentLoader;
import org.schoellerfamily.gedbrowser.persistence.PersistenceException;
import org.schoellerfamily.gedbrowser.persistence.domain.GedDocument;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.AttributeDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.ChildDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.DateDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.FamCDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.FamSDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.FamilyDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.GedDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.HeadDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.HusbandDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.MultimediaDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.NameDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.PersonDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.PlaceDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.RootDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.SourceDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.SourceLinkDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.SubmittorDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.SubmittorLinkDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.TrailerDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.WifeDocumentMongo;
/**
* @author Dick Schoeller
*/
@SuppressWarnings({ "PMD.CouplingBetweenObjects", "PMD.ExcessiveImports" })
public final class GedObjectToGedDocumentMongoConverter
implements GedDocumentLoader {
/**
* Holds the mapping between GedObject and GedDocument.
*/
private static final Map<Class<? extends GedObject>,
Class<? extends GedDocumentMongo<? extends GedObject>>> CLASS_MAP =
new HashMap<>();
static {
CLASS_MAP.put(Attribute.class, AttributeDocumentMongo.class);
CLASS_MAP.put(Child.class, ChildDocumentMongo.class);
CLASS_MAP.put(Date.class, DateDocumentMongo.class);
CLASS_MAP.put(Name.class, NameDocumentMongo.class);
CLASS_MAP.put(Family.class, FamilyDocumentMongo.class);
CLASS_MAP.put(FamC.class, FamCDocumentMongo.class);
CLASS_MAP.put(FamS.class, FamSDocumentMongo.class);
CLASS_MAP.put(Head.class, HeadDocumentMongo.class);
CLASS_MAP.put(Husband.class, HusbandDocumentMongo.class);
CLASS_MAP.put(Multimedia.class, MultimediaDocumentMongo.class);
CLASS_MAP.put(Person.class, PersonDocumentMongo.class);
CLASS_MAP.put(Place.class, PlaceDocumentMongo.class);
CLASS_MAP.put(Root.class, RootDocumentMongo.class);
CLASS_MAP.put(Source.class, SourceDocumentMongo.class);
CLASS_MAP.put(SourceLink.class, SourceLinkDocumentMongo.class);
CLASS_MAP.put(Submittor.class, SubmittorDocumentMongo.class);
CLASS_MAP.put(SubmittorLink.class, SubmittorLinkDocumentMongo.class);
CLASS_MAP.put(Trailer.class, TrailerDocumentMongo.class);
CLASS_MAP.put(Wife.class, WifeDocumentMongo.class);
}
/**
* Constructor.
*/
public GedObjectToGedDocumentMongoConverter() {
// Empty
}
/**
* @param ged the GedObject that we are going to persist
* @return the mongo document to represent it
*/
private GedDocumentMongo<? extends GedObject> create(final GedObject ged) {
if (ged == null) {
throw new PersistenceException(
"Null ged object not supported");
}
final Class<? extends GedDocumentMongo<?>> mongoClass =
CLASS_MAP.get(ged.getClass());
if (mongoClass == null) {
throw new PersistenceException("Class not supported");
}
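        // Reflectively instantiate the matching *DocumentMongo class and let it populate
        // itself from the GedObject being persisted.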
try {
final GedDocumentMongo<?> retval = mongoClass.newInstance();
retval.loadGedObject(this, ged);
return retval;
} catch (InstantiationException | IllegalAccessException e) {
throw new PersistenceException(
"Could not instantiate class", e);
}
}
/**
* @param document the document
* @param gedAttributes the attributes to add
*/
public void loadAttributes(final GedDocument<?> document,
final List<GedObject> gedAttributes) {
document.clearAttributes();
for (final GedObject ged : gedAttributes) {
final GedDocument<?> documentAttribute =
createGedDocument(ged);
document.addAttribute(documentAttribute);
}
}
/**
* @param <G> type of GedObject provided
* @param ged the GedObject that is being transformed
* @return the mongo document produced
*/
public <G extends GedObject> GedDocumentMongo<G> createGedDocument(
final G ged) {
@SuppressWarnings("unchecked")
final GedDocumentMongo<G> retval = (GedDocumentMongo<G>) create(ged);
retval.setGedObject(ged);
return retval;
}
}
| gedbrowser-mongo-dao/src/main/java/org/schoellerfamily/gedbrowser/persistence/mongo/gedconvert/GedObjectToGedDocumentMongoConverter.java | package org.schoellerfamily.gedbrowser.persistence.mongo.gedconvert;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.schoellerfamily.gedbrowser.datamodel.Attribute;
import org.schoellerfamily.gedbrowser.datamodel.Child;
import org.schoellerfamily.gedbrowser.datamodel.Date;
import org.schoellerfamily.gedbrowser.datamodel.FamC;
import org.schoellerfamily.gedbrowser.datamodel.FamS;
import org.schoellerfamily.gedbrowser.datamodel.Family;
import org.schoellerfamily.gedbrowser.datamodel.GedObject;
import org.schoellerfamily.gedbrowser.datamodel.Head;
import org.schoellerfamily.gedbrowser.datamodel.Husband;
import org.schoellerfamily.gedbrowser.datamodel.Multimedia;
import org.schoellerfamily.gedbrowser.datamodel.Name;
import org.schoellerfamily.gedbrowser.datamodel.Person;
import org.schoellerfamily.gedbrowser.datamodel.Place;
import org.schoellerfamily.gedbrowser.datamodel.Root;
import org.schoellerfamily.gedbrowser.datamodel.Source;
import org.schoellerfamily.gedbrowser.datamodel.SourceLink;
import org.schoellerfamily.gedbrowser.datamodel.Submittor;
import org.schoellerfamily.gedbrowser.datamodel.SubmittorLink;
import org.schoellerfamily.gedbrowser.datamodel.Trailer;
import org.schoellerfamily.gedbrowser.datamodel.Wife;
import org.schoellerfamily.gedbrowser.persistence.GedDocumentLoader;
import org.schoellerfamily.gedbrowser.persistence.PersistenceException;
import org.schoellerfamily.gedbrowser.persistence.domain.GedDocument;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.AttributeDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.ChildDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.DateDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.FamCDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.FamSDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.FamilyDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.GedDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.HeadDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.HusbandDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.MultimediaDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.NameDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.PersonDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.PlaceDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.RootDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.SourceDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.SourceLinkDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.SubmittorDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.SubmittorLinkDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.TrailerDocumentMongo;
import org.schoellerfamily.gedbrowser.persistence.mongo.domain.WifeDocumentMongo;
/**
* @author Dick Schoeller
*/
@SuppressWarnings({ "PMD.CouplingBetweenObjects", "PMD.ExcessiveImports" })
public final class GedObjectToGedDocumentMongoConverter
implements GedDocumentLoader {
/**
* Holds the mapping between GedObject and GedDocument.
*/
private static final Map<Class<? extends GedObject>,
Class<? extends GedDocumentMongo<? extends GedObject>>> CLASS_MAP =
new HashMap<>();
static {
CLASS_MAP.put(Attribute.class, AttributeDocumentMongo.class);
CLASS_MAP.put(Child.class, ChildDocumentMongo.class);
CLASS_MAP.put(Date.class, DateDocumentMongo.class);
CLASS_MAP.put(Name.class, NameDocumentMongo.class);
CLASS_MAP.put(Family.class, FamilyDocumentMongo.class);
CLASS_MAP.put(FamC.class, FamCDocumentMongo.class);
CLASS_MAP.put(FamS.class, FamSDocumentMongo.class);
CLASS_MAP.put(Head.class, HeadDocumentMongo.class);
CLASS_MAP.put(Husband.class, HusbandDocumentMongo.class);
CLASS_MAP.put(Multimedia.class, MultimediaDocumentMongo.class);
CLASS_MAP.put(Person.class, PersonDocumentMongo.class);
CLASS_MAP.put(Place.class, PlaceDocumentMongo.class);
CLASS_MAP.put(Root.class, RootDocumentMongo.class);
CLASS_MAP.put(Source.class, SourceDocumentMongo.class);
CLASS_MAP.put(SourceLink.class, SourceLinkDocumentMongo.class);
CLASS_MAP.put(Submittor.class, SubmittorDocumentMongo.class);
CLASS_MAP.put(SubmittorLink.class, SubmittorLinkDocumentMongo.class);
CLASS_MAP.put(Trailer.class, TrailerDocumentMongo.class);
CLASS_MAP.put(Wife.class, WifeDocumentMongo.class);
}
/**
* Constructor.
*/
public GedObjectToGedDocumentMongoConverter() {
// Empty
}
/**
* @param ged the GedObject that we are going to persist
* @return the mongo document to represent it
*/
private GedDocumentMongo<? extends GedObject> create(final GedObject ged) {
if (ged == null) {
throw new PersistenceException(
"Null ged object not supported");
}
final Class<? extends GedDocumentMongo<?>> mongoClass =
CLASS_MAP.get(ged.getClass());
if (mongoClass == null) {
throw new PersistenceException("Class not supported");
}
try {
final GedDocumentMongo<?> retval = mongoClass.newInstance();
retval.loadGedObject(this, ged);
return retval;
} catch (InstantiationException | IllegalAccessException e) {
throw new PersistenceException(
"Could not instantiate class", e);
}
}
/**
* @param document the document
* @param gedAttributes the attributes to add
*/
public void loadAttributes(final GedDocument<?> document,
final List<GedObject> gedAttributes) {
document.clearAttributes();
for (final GedObject ged : gedAttributes) {
// This happens because appenders create a list entry when they
// are not retained.
if (ged == null) {
continue;
}
final GedDocument<?> documentAttribute =
createGedDocument(ged);
document.addAttribute(documentAttribute);
}
}
/**
* @param <G> type of GedObject provided
* @param ged the GedObject that is being transformed
* @return the mongo document produced
*/
public <G extends GedObject> GedDocumentMongo<G> createGedDocument(
final G ged) {
@SuppressWarnings("unchecked")
final GedDocumentMongo<G> retval = (GedDocumentMongo<G>) create(ged);
retval.setGedObject(ged);
return retval;
}
}
| Fixes #319 - remove redundant null check
| gedbrowser-mongo-dao/src/main/java/org/schoellerfamily/gedbrowser/persistence/mongo/gedconvert/GedObjectToGedDocumentMongoConverter.java | Fixes #319 - remove redundant null check |
|
Java | apache-2.0 | b98c46c60918824eb463e859d68473474a6538c0 | 0 | gosu-lang/gosu-lang,tcmoore32/sheer-madness,tcmoore32/sheer-madness,gosu-lang/gosu-lang,gosu-lang/gosu-lang,tcmoore32/sheer-madness,tcmoore32/sheer-madness,gosu-lang/gosu-lang,tcmoore32/sheer-madness | /*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser;
import gw.internal.gosu.ir.transform.util.IRTypeResolver;
import gw.internal.gosu.parser.expressions.BlockExpression;
import gw.internal.gosu.parser.expressions.ClassDeclaration;
import gw.internal.gosu.parser.expressions.InterfacesClause;
import gw.internal.gosu.parser.expressions.MethodCallExpression;
import gw.internal.gosu.parser.expressions.NameInDeclaration;
import gw.internal.gosu.parser.expressions.NullExpression;
import gw.internal.gosu.parser.expressions.ParameterListClause;
import gw.internal.gosu.parser.expressions.SuperTypeClause;
import gw.internal.gosu.parser.expressions.TypeLiteral;
import gw.internal.gosu.parser.expressions.TypeVariableDefinitionImpl;
import gw.internal.gosu.parser.statements.ClassStatement;
import gw.internal.gosu.parser.statements.ConstructorStatement;
import gw.internal.gosu.parser.statements.DelegateStatement;
import gw.internal.gosu.parser.statements.FunctionStatement;
import gw.internal.gosu.parser.statements.MethodCallStatement;
import gw.internal.gosu.parser.statements.NamespaceStatement;
import gw.internal.gosu.parser.statements.NoOpStatement;
import gw.internal.gosu.parser.statements.NotAStatement;
import gw.internal.gosu.parser.statements.PropertyStatement;
import gw.internal.gosu.parser.statements.ReturnStatement;
import gw.internal.gosu.parser.statements.StatementList;
import gw.internal.gosu.parser.statements.UsesStatement;
import gw.internal.gosu.parser.statements.VarInitializationVerifier;
import gw.internal.gosu.parser.statements.VarStatement;
import gw.lang.annotation.UsageTarget;
import gw.lang.ir.IRType;
import gw.lang.parser.GlobalScope;
import gw.lang.parser.GosuParserTypes;
import gw.lang.parser.IBlockClass;
import gw.lang.parser.IDynamicFunctionSymbol;
import gw.lang.parser.IFunctionSymbol;
import gw.lang.parser.IParseIssue;
import gw.lang.parser.IParseTree;
import gw.lang.parser.IParsedElement;
import gw.lang.parser.IParsedElementWithAtLeastOneDeclaration;
import gw.lang.parser.IParserState;
import gw.lang.parser.IReducedDynamicFunctionSymbol;
import gw.lang.parser.IScope;
import gw.lang.parser.ISymbol;
import gw.lang.parser.ISymbolTable;
import gw.lang.parser.IToken;
import gw.lang.parser.ITokenizerOffsetMarker;
import gw.lang.parser.ITypeUsesMap;
import gw.lang.parser.Keyword;
import gw.lang.parser.ScriptPartId;
import gw.lang.parser.exceptions.NotImplementedParseException;
import gw.lang.parser.exceptions.ObsoleteConstructorWarning;
import gw.lang.parser.exceptions.ParseException;
import gw.lang.parser.exceptions.ParseIssue;
import gw.lang.parser.exceptions.ParseResultsException;
import gw.lang.parser.expressions.IMemberAccessExpression;
import gw.lang.parser.expressions.IModifierListClause;
import gw.lang.parser.expressions.IParameterDeclaration;
import gw.lang.parser.expressions.ITypeVariableDefinition;
import gw.lang.parser.expressions.ITypeVariableDefinitionExpression;
import gw.lang.parser.expressions.Variance;
import gw.lang.parser.resources.Res;
import gw.lang.parser.resources.ResourceKey;
import gw.lang.parser.statements.IClassStatement;
import gw.lang.parser.statements.IFunctionStatement;
import gw.lang.parser.statements.ITerminalStatement;
import gw.lang.parser.statements.IUsesStatementList;
import gw.lang.reflect.FunctionType;
import gw.lang.reflect.IConstructorInfo;
import gw.lang.reflect.IEnhanceableType;
import gw.lang.reflect.IErrorType;
import gw.lang.reflect.IFeatureInfo;
import gw.lang.reflect.IFunctionType;
import gw.lang.reflect.IInvocableType;
import gw.lang.reflect.IMethodInfo;
import gw.lang.reflect.IParameterInfo;
import gw.lang.reflect.IPropertyInfo;
import gw.lang.reflect.IRelativeTypeInfo;
import gw.lang.reflect.IType;
import gw.lang.reflect.ITypeInfo;
import gw.lang.reflect.ITypeVariableType;
import gw.lang.reflect.MethodList;
import gw.lang.reflect.Modifier;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.gs.ClassType;
import gw.lang.reflect.gs.IGenericTypeVariable;
import gw.lang.reflect.gs.IGosuClass;
import gw.lang.reflect.gs.IGosuClassParser;
import gw.lang.reflect.gs.IGosuEnhancement;
import gw.lang.reflect.gs.IGosuMethodInfo;
import gw.lang.reflect.gs.IGosuProgram;
import gw.lang.reflect.gs.ISourceFileHandle;
import gw.lang.reflect.gs.StringSourceFileHandle;
import gw.lang.reflect.java.GosuTypes;
import gw.lang.reflect.java.IJavaType;
import gw.lang.reflect.java.JavaTypes;
import gw.util.DynamicArray;
import gw.util.GosuExceptionUtil;
import gw.util.GosuObjectUtil;
import gw.util.GosuStringUtil;
import gw.util.Stack;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
*/
@SuppressWarnings({"ThrowableInstanceNeverThrown"})
public class GosuClassParser extends ParserBase implements IGosuClassParser, ITokenizerOffsetMarker
{
private int _iClassOffset;
private int _iClassLineNum;
private int _iClassColumn;
private ClassStatement _classStmt;
private Stack<IGosuClassInternal> _innerClasses;
private int _innerClassOffset;
public GosuClassParser( GosuParser owner )
{
super( owner );
_innerClasses = new Stack<IGosuClassInternal>();
}
  //## todo: maybe ctors should set the class here so that subsequent calls to parseXxx() don't need to take an IGosuClass
private GosuClassParser( GosuParser owner, IGosuClassInternal innerClass )
{
super( owner );
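    // Position the shared tokenizer at the inner class's source: restore the recorded mark
    // if one exists, otherwise seek to the offset stored in the source file handle.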
int mark = ((InnerClassFileSystemSourceFileHandle)innerClass.getSourceFileHandle()).getMark();
if( mark >= 0 )
{
getTokenizer().restoreToMark( mark );
}
else
{
goToPosition( innerClass.getSourceFileHandle().getOffset() );
}
_innerClassOffset = getTokenizer().mark();
_innerClasses = new Stack<IGosuClassInternal>();
}
public static void parseAnonymousInnerClass( GosuParser gosuParser, IGosuClassInternal innerGsClass )
{
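    // Parse the anonymous class in separate header, declaration, and definition passes
    // (the definition pass is skipped while a method scoring is in progress), temporarily
    // clearing the enclosing parser's block stack and saving its declared function symbols
    // so both can be restored in the finally block.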
Stack<BlockExpression> enclosingBlocks = gosuParser._blocks;
gosuParser.setBlocks( null );
Map<String, List<IFunctionSymbol>> restoreDfsDecls = copyDFSDecls( gosuParser );
try
{
new GosuClassParser( gosuParser, innerGsClass ).parseHeader(innerGsClass, false, true, true );
new GosuClassParser( gosuParser, innerGsClass ).parseDeclarations( innerGsClass );
if( !gosuParser.getContextType().isMethodScoring() )
{
new GosuClassParser( gosuParser, innerGsClass ).parseDefinitions( innerGsClass );
}
}
finally
{
gosuParser.setDfsDeclInSetByName( restoreDfsDecls );
gosuParser.setBlocks( enclosingBlocks );
}
}
@Override
protected String getScript()
{
return getOwner().getScript();
}
@Override
public int getLineNumShift()
{
return getOwner().getLineNumShift();
}
@Override
public int getOffsetShift()
{
return getOwner().getOffsetShift();
}
@Override
public int getOffsetMark()
{
if( isInnerClass( getGosuClass() ) )
{
return _innerClassOffset;
}
return -1;
}
@Override
public ClassStatement getClassStatement()
{
return _classStmt;
}
private void setClassStatement( ClassStatement classStmt )
{
if( classStmt == null )
{
throw new IllegalArgumentException( "Class stmt is null" );
}
_classStmt = classStmt;
}
private IGosuClassInternal getCurrentInnerClass()
{
return _innerClasses.isEmpty() ? null : _innerClasses.peek();
}
private void pushInnerClass( IGosuClassInternal gsInnerClass )
{
_innerClasses.push( gsInnerClass );
}
private IGosuClassInternal popInnerClass( IGosuClassInternal gsInnerClass )
{
IGosuClassInternal top = _innerClasses.pop();
if( top != gsInnerClass )
{
throw new IllegalStateException( "Unbalanced push/pop for inner classes" );
}
return top;
}
private boolean isInnerClassesEmpty()
{
return _innerClasses.isEmpty();
}
/**
* Parses all declarations including:<br>
* <ul>
* <li> Fields
* <li> Methods
* <li> Properties
* <li> Inner types, recursively
* </ul>
*/
public void parseDeclarations( IGosuClass gsCls )
{
IGosuClassInternal gsClass = (IGosuClassInternal)gsCls;
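    // If declarations for this class were already compiled, only inner classes whose
    // declarations were deferred still need to be parsed before returning early.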
if( gsClass.isDeclarationsCompiled() )
{
if( !gsClass.isInnerDeclarationsCompiled() )
{
if( parseDeclarationsOfLeftOverInnerClasses( gsClass ) )
{
gsClass.setInnerDeclarationsCompiled();
}
}
return;
}
boolean bPushedScope = pushScopeIfNeeded( gsClass );
getTokenizer().pushOffsetMarker( this );
ScriptPartId scriptPartId = new ScriptPartId( gsClass, null );
getOwner().pushScriptPart( scriptPartId );
GosuClassCompilingStack.pushCompilingType( gsClass );
gsClass.setCompilingDeclarations( true );
try
{
ClassStatement classStmt = (ClassStatement)gsClass.getClassStatement();
try
{
setClassStatement( classStmt );
}
catch( Exception e )
{
throw GosuExceptionUtil.forceThrow( e, gsClass.getName() );
}
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
classStmt.getClassFileStatement().clearParseTreeInformation();
}
// Don't need an isolated scope here because class members are all dynamic
// and, therefore, don't have to be indexed wrt an isolated scope.
getSymbolTable().pushScope();
try
{
//## todo: reparsing header with annotations this time, any chance we can do that the first time we parse the header, so we can avoid doing it twice?
String strClassName = parseHeader(gsClass, false, false, true);
if( gsClass instanceof IGosuEnhancementInternal )
{
parseEnhancementBodyDecl( gsClass );
}
else
{
parseClassBodyDecl( strClassName, gsClass );
}
}
finally
{
getSymbolTable().popScope();
pushStatement( classStmt );
setLocation( _iClassOffset, _iClassLineNum, _iClassColumn, true );
popStatement();
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
pushStatement( classStmt.getClassFileStatement() );
setLocation( 0, 1, _iClassColumn, true );
popStatement();
}
}
classStmt.compactParseTree();
}
finally
{
gsClass.setCompilingDeclarations( false );
// Do not set decls compiled; we do that in parseClassBodyDecl(). Also the decls may not have actually been compiled
//gsClass.setDeclarationsCompiled();
GosuClassCompilingStack.popCompilingType();
getOwner().popScriptPart( scriptPartId );
popScopeIfNeeded( bPushedScope, gsClass );
getTokenizer().popOffsetMarker( this );
removeTypeVarsFromParserMap( gsClass );
}
}
private boolean isTopLevelClass( IGosuClassInternal gsClass )
{
return gsClass.getEnclosingType() == null;
}
// /**
// * Extend the bounds of the enclosing ClassFileStatement if need be. Note this is only necessary when
// * the enclosing class has errors and, therefore, may not have parsed elements with the
// */
// private void extendEnclosingClassFileBounds( IParsedElement enclosingClassFileStmt )
// {
// if( enclosingClassFileStmt.getLocation() != null )
// {
// int iExtentDelta = enclosingClassFileStmt.getLocation().getExtent() - getClassStatement().getClassFileStatement().getLocation().getExtent();
// if( iExtentDelta < 0 )
// {
// enclosingClassFileStmt.getLocation().setLength( enclosingClassFileStmt.getLocation().getLength() + -iExtentDelta );
// }
// }
// }
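/**
 * Parses the definition (third) pass for the given class: the tokenizer is reset, the header
 * is reparsed, and the full class body -- including function bodies -- is parsed. Programs
 * additionally get their executable statements parsed as an entry-point function. The parsed
 * elements are verified and any resulting ParseResultsException is recorded on the class.
 */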
public void parseDefinitions( IGosuClass gsCls )
{
IGosuClassInternal gsClass = (IGosuClassInternal)gsCls;
getTokenizer().pushOffsetMarker( this );
boolean bPushedScope = pushScopeIfNeeded( gsClass );
gsClass.setCompilingDefinitions( true );
GosuClassParseInfo parseInfo = gsClass.getParseInfo();
ClassStatement classStmt = parseInfo.getClassStatement();
setClassStatement( classStmt );
clearParseTree( gsClass );
ScriptPartId scriptPartId = new ScriptPartId( gsClass, null );
getOwner().pushScriptPart( scriptPartId );
GosuClassCompilingStack.pushCompilingType( gsClass );
getOwner()._iReturnOk++;
if( isDeprecated( (ModifierInfo)gsCls.getModifierInfo() ) )
{
getOwner().pushIgnoreTypeDeprecation();
}
try
{
try
{
if( !gsClass.isDefinitionsCompiled() )
{
// Don't need an isolated scope here because class members are all dynamic
// and, therefore, don't have to be indexed wrt an isolated scope.
getSymbolTable().pushScope();
try
{
//
// Reset the tokenizer to prepare for the second... er, third pass
//
getTokenizer().reset();
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
getLocationsList().clear();
}
else
{
removeInnerClassDelcarationsFromLocationsList( gsClass );
}
//
// Parse the whole class, including inner types
//
// Note function definitions are parsed as no-op statements, but are
// pushed onto the dynamic function symbol stack.
//## todo: do we really need to parse the header *again* (maybe for annotations?)
parseHeader(gsClass, false, false, true );
if( gsClass instanceof IGosuEnhancementInternal )
{
parseClassStatementAsEnhancement( gsClass );
}
else
{
parseClassStatement();
}
}
finally
{
getSymbolTable().popScope();
if( gsClass instanceof IGosuProgramInternal )
{
((IGosuProgramInternal)gsClass).setParsingExecutableProgramStatements( true );
try
{
FunctionStatement fs = parseExecutableProgramStatements( (IGosuProgramInternal)gsClass );
makeExprRootFunction( (IGosuProgramInternal)gsClass, fs );
}
finally
{
((IGosuProgramInternal)gsClass).setParsingExecutableProgramStatements( false );
}
}
boolean b = isInnerClass( gsClass ) || match( null, SourceCodeTokenizer.TT_EOF );
if( !verify( classStmt, b, Res.MSG_END_OF_STMT ) )
{
consumeTrailingTokens();
}
gsClass.setDefinitionsCompiled();
}
}
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
getOwner().setParsed( true );
}
}
finally
{
pushStatement( classStmt );
setLocation( _iClassOffset, _iClassLineNum, _iClassColumn, true );
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
popStatement();
pushStatement( classStmt.getClassFileStatement() );
setLocation( 0, 1, _iClassColumn, true );
popStatement();
}
assignTokens( classStmt );
}
try
{
verifyParsedElement( isInnerClass( gsClass ) && !TypeLord.isEvalProgram( gsClass ) ? classStmt : classStmt.getClassFileStatement() );
}
catch( ParseResultsException pre )
{
gsClass.setParseResultsException( pre );
}
}
finally
{
try
{
gsClass.setCompilingDefinitions( false );
gsClass.setDefinitionsCompiled();
getOwner().popScriptPart( scriptPartId );
}
finally
{
GosuClassCompilingStack.popCompilingType();
}
popScopeIfNeeded( bPushedScope, gsClass );
getTokenizer().popOffsetMarker( this );
removeTypeVarsFromParserMap( gsClass );
getOwner()._iReturnOk--;
pushStatement( _classStmt.getClassFileStatement() );
setLocation( 0, 1, _iClassColumn, true );
popStatement();
if( isDeprecated( (ModifierInfo)gsCls.getModifierInfo() ) )
{
getOwner().popIgnoreTypeDeprecation();
}
gsClass.syncGenericAndParameterizedClasses();
getOwner().clearDfsStack();
_classStmt = null;
VarInitializationVerifier.verifyFinalFields( gsClass );
VarInitializationVerifier.verifyLocalVars( gsClass, true );
if( isTopLevelClass( gsClass ) )
{
postDefinitionVerify(classStmt);
}
}
}
private void postDefinitionVerify( IClassStatement classStmt )
{
if( classStmt == null )
{
return;
}
IGosuClass gsClass = classStmt.getGosuClass();
if( gsClass.isAnonymous() || gsClass instanceof IBlockClass )
{
return;
}
CompileTimeAnnotationHandler.postDefinitionVerification( classStmt );
for( IGosuClass innerClass: classStmt.getGosuClass().getInnerClasses() )
{
postDefinitionVerify( innerClass.getClassStatement() );
}
}
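// Removes trailing parse-tree locations owned by this class from the shared locations list
// (left over from the declaration pass) before the definition pass re-parses them.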
private void removeInnerClassDelcarationsFromLocationsList( IGosuClassInternal gsClass )
{
List<ParseTree> locations = getLocationsList();
for( int i = locations.size()-1; i >= 0; i-- )
{
ParseTree csr = locations.get( i );
if( csr.getScriptPartId().getContainingType() == gsClass )
{
IParseTree parent = csr.getParent();
if( parent != null )
{
parent.removeChild( csr );
}
locations.remove( csr );
}
else
{
break;
}
}
}
private void consumeTrailingTokens()
{
while( !match( null, SourceCodeTokenizer.TT_EOF ) )
{
getTokenizer().nextToken();
}
}
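// For editor parsers on top-level classes only: attaches the tokenizer's tokens to the
// class file statement's parse tree and fails fast if any token was left unassigned.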
private void assignTokens( ClassStatement classStmt )
{
if( !getOwner().isEditorParser() )
{
return;
}
if( !isTopLevelClass( classStmt.getGosuClass() ) )
{
return;
}
List<Token> tokens = getOwner().getTokenizer().getTokens().toList();
classStmt.getClassFileStatement().assignTokens( tokens );
//## todo: handle programs (see GosuAstTransformer)
// String strSource = getGosuClass().getSource();
// String strTextFromParseTree = classStmt.getClassFileStatement().getLocation().getTextFromTokens();
// if( !strSource.equals( strTextFromParseTree ) )
// {
// int[] diff = getDiffOffset( strSource, strTextFromParseTree );
//
// throw new IllegalStateException( buildInconsistentParseErrorMessage( strSource, strTextFromParseTree, diff ) );
// }
//noinspection LoopStatementThatDoesntLoop
for( IToken token : tokens )
{
throw new IllegalStateException( "One or more tokens were not assigned: " + token );
}
}
private String buildInconsistentParseErrorMessage( String strSource, String strTextFromParseTree, int[] diff )
{
return
"Parsed class, " + getGosuClass().getName() + ", inconsistent with source.\n" +
"Line: " + diff[1] + " Offset: " + diff[0] + "\n" +
"*** Parsed Version ***\n" +
ParseIssue.makeContextString( diff[1], strTextFromParseTree, diff[2] ) + "\n" +
"*** Source Version ***\n" +
ParseIssue.makeContextString( diff[1], strSource, diff[2] ) + "\n";
}
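// Returns {offset, line, line-start-offset} of the first character at which the original source
// and the text reconstructed from the parse tree diverge (or the end of the shorter text);
// returns null if either string is null.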
private int[] getDiffOffset( String strSource, String strTextFromParseTree )
{
if( strSource == null || strTextFromParseTree == null )
{
return null;
}
int i;
int iLineOffset = 0;
int iLine = 0;
for( i = 0; i < strSource.length(); i++ )
{
if( i >= strTextFromParseTree.length() )
{
return new int[] {i, iLine, iLineOffset};
}
char sourceChar = strSource.charAt( i );
char parserChar = strTextFromParseTree.charAt( i );
if( sourceChar != parserChar )
{
return new int[] {i, iLine, iLineOffset};
}
if( parserChar == '\n' )
{
iLine++;
iLineOffset = i;
}
}
return new int[] {i, iLine, iLineOffset};
}
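// Clears stale parse-tree information before the definition pass: non-program top-level classes
// and eval programs clear the whole class-file statement; other classes clear only their own
// class statement and, if anonymous, drop an abandoned location left behind by the decl pass.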
private void clearParseTree( IGosuClassInternal gsClass )
{
if( (!(gsClass instanceof IGosuProgram) && isTopLevelClass( gsClass )) ||
TypeLord.isEvalProgram( gsClass ) )
{
gsClass.getClassStatement().getClassFileStatement().clearParseTreeInformation();
}
else
{
gsClass.getClassStatement().clearParseTreeInformation();
if( gsClass.isAnonymous() )
{
//noinspection SuspiciousMethodCalls
if( !getLocationsList().isEmpty() )
{
ParseTree last = getLocationsList().get( getLocationsList().size() - 1 );
if( last.getParsedElement() == null )
{
// Remove abandoned class-stmt parse tree from decl parse
getLocationsList().remove( last );
}
}
}
}
}
private boolean isInnerClass( IGosuClassInternal gsClass )
{
return gsClass.getEnclosingType() != null;
}
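// Parses a program's top-level statements as its entry-point function: the tokenizer is rewound
// (keeping its tokens), the header is reparsed, captured program symbols are added, and the
// resulting locations are merged back with the locations recorded by the class parse.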
private FunctionStatement parseExecutableProgramStatements( IGosuProgramInternal gsClass )
{
List savedLocations = getOwner().getLocations();
getTokenizer().resetButKeepTokens();
getLocationsList().clear();
getOwner().setLocationsFromProgramClassParser( savedLocations );
parseHeader( gsClass, false, false, true );
gsClass.addCapturedProgramSymbols( getSymbolTable() );
FunctionStatement fs = parseProgramAsFunctionStatement( gsClass );
List newLocations = getOwner().getLocations();
removeRedundantUsesStatementList( newLocations );
getOwner().getLocationsList().clear();
getOwner().setLocationsFromProgramClassParser( null );
getOwner().getLocationsList().addAll( savedLocations );
getOwner().getLocationsList().addAll( newLocations );
return fs;
}
private void removeRedundantUsesStatementList( List newLocations )
{
for( int i = 0; i < newLocations.size(); i++ )
{
IParseTree pt = (IParseTree)newLocations.get( i );
if( pt.getParsedElement() instanceof IUsesStatementList )
{
newLocations.remove( i-- );
}
}
}
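// If the program declares an 'evaluateRootExpr' member, rebinds it to a statement list that
// returns the root of the program's trailing member-access expression when one exists, or a
// null expression otherwise.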
private void makeExprRootFunction( IGosuProgramInternal gsClass, FunctionStatement callableStmt )
{
DynamicFunctionSymbol dfsDecl = getProgramRootExprValueDfs();
if( dfsDecl != null )
{
getOwner().putDfsDeclInSetByName( dfsDecl );
StatementList stmtList = makeReturnStatementWithExprRoot( gsClass, callableStmt );
if( stmtList != null )
{
FunctionStatement fs = new FunctionStatement();
fs.setDynamicFunctionSymbol( dfsDecl );
dfsDecl.setValueDirectly( stmtList );
getOwner().pushDynamicFunctionSymbol( dfsDecl );
fs.setDynamicFunctionSymbol( dfsDecl );
dfsDecl.setClassMember( true );
gsClass.getParseInfo().addMemberFunction(dfsDecl);
}
}
}
private StatementList makeReturnStatementWithExprRoot( IGosuProgramInternal gsClass, FunctionStatement callableStmt )
{
Statement statement = (Statement)callableStmt.getDynamicFunctionSymbol().getValueDirectly();
if( statement != null )
{
boolean[] bAbsolute = {false};
ITerminalStatement significantTerminalStatement = statement.getLeastSignificantTerminalStatement( bAbsolute );
if( gsClass.isGenRootExprAccess() &&
bAbsolute[0] &&
significantTerminalStatement instanceof ReturnStatement &&
significantTerminalStatement.getParent() != null &&
significantTerminalStatement.getParent().getParent() == callableStmt )
{
ReturnStatement rs = (ReturnStatement)significantTerminalStatement;
Expression expr = rs.getValue();
if( expr instanceof IMemberAccessExpression )
{
Expression rootExpr = (Expression)((IMemberAccessExpression)expr).getRootExpression();
ReturnStatement defaultReturnStmt = new ReturnStatement();
defaultReturnStmt.setValue( rootExpr );
List<Statement> stmts = new ArrayList<Statement>( 2 );
stmts.add( defaultReturnStmt );
StatementList stmtList = new StatementList( getSymbolTable() );
stmtList.setStatements( stmts );
return stmtList;
}
}
}
ReturnStatement defaultReturnStmt = new ReturnStatement();
NullExpression nullExpr = new NullExpression();
nullExpr.setType( JavaTypes.OBJECT() );
defaultReturnStmt.setValue( nullExpr );
List<Statement> stmts = new ArrayList<Statement>( 2 );
stmts.add( defaultReturnStmt );
StatementList stmtList = new StatementList( getSymbolTable() );
stmtList.setStatements( stmts );
return stmtList;
}
private DynamicFunctionSymbol getProgramRootExprValueDfs()
{
for( IDynamicFunctionSymbol dfs : getGosuClass().getMemberFunctions() )
{
if( dfs.getName().contains( "evaluateRootExpr" ) )
{
return (DynamicFunctionSymbol)dfs;
}
}
return null;
}
private FunctionStatement parseProgramAsFunctionStatement( IGosuClassInternal gsClass )
{
// Copy the Non-Static Scope so we can reuse it for each member
//
IScope nonstaticScope;
Map<String, List<IFunctionSymbol>> nonstaticDfsMap;
getSymbolTable().pushScope();
try
{
getOwner().newDfsDeclInSetByName();
gsClass.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), false );
nonstaticDfsMap = getOwner().getDfsDecls();
getOwner().newDfsDeclInSetByName();
}
finally
{
nonstaticScope = getSymbolTable().popScope();
}
getSymbolTable().pushScope();
getOwner().newDfsDeclInSetByName();
FunctionStatement functionStmt;
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
try
{
getOwner().setDfsDeclInSetByName( nonstaticDfsMap );
getOwner().putDfsDeclsInTable( ((IGosuProgramInternal)getGosuClass()).getSymbolTable() );
getSymbolTable().pushScope( nonstaticScope );
getOwner().pushParsingStaticMember( false );
try
{
functionStmt = getOwner().parseProgramEntryPointBody();
}
finally
{
getSymbolTable().popScope();
getOwner().popParsingStaticMember();
}
DynamicFunctionSymbol dfs = functionStmt == null ? null : functionStmt.getDynamicFunctionSymbol();
if( dfs != null )
{
dfs.setClassMember( true );
if( dfs.getDisplayName().equals( gsClass.getRelativeName() ) )
{
gsClass.getParseInfo().addConstructorFunction(dfs);
}
else
{
gsClass.getParseInfo().addMemberFunction(dfs);
}
}
}
finally
{
getOwner().newDfsDeclInSetByName();
getSymbolTable().popScope();
}
setLocation( iOffset, iLineNum, iColumn, true );
if( getTokenizer().getTokenStart() == iOffset )
{
getLocationsList().remove( getLocationsList().size() - 1 );
}
functionStmt = (FunctionStatement)popStatement();
return functionStmt;
}
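/**
 * Declaration-pass parsing of a class body: checks for a duplicate type name, pulls in members
 * of the supertype and interfaces, adds the implicit 'outer' member for non-static inner classes
 * and the implicit enum members, then parses each field/constructor/function/property declaration.
 * Finally it ensures a default constructor, adds delegate/program/template entry points, and
 * declaration-compiles any inner classes that have not yet been compiled.
 */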
private void parseClassBodyDecl( String strClassName, IGosuClassInternal gsClass )
{
try
{
if( strClassName != null )
{
IType type = TypeLoaderAccess.instance().getIntrinsicTypeByFullName( strClassName );
if( TypeSystem.getOrCreateTypeReference( gsClass ) != type && !(gsClass instanceof IGosuClassFragment) )
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_DUPLICATE_CLASS_FOUND, type.getName() ) );
}
}
}
catch( ClassNotFoundException e )
{
// ignore
}
maybeForceRecursiveTypeToAssignSuperTypes( gsClass );
verify( getClassStatement(), gsClass instanceof IGosuProgram || match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CLASS_DEF );
if( !putClassMembersOfSuperAndInterfaces( gsClass ) )
{
gsClass.setDeclarationsBypassed();
return;
}
if( isInnerClass( gsClass ) && !gsClass.isStatic() )
{
addOuterMember( gsClass );
}
addAutomaticEnumMethodsAndProperties( gsClass );
processEnumConstants( gsClass );
for( Object member = parseFunctionOrConstructorOrFieldDeclaration( gsClass );
member != null;
member = parseFunctionOrConstructorOrFieldDeclaration( gsClass ) )
{
popStatement();
if( member instanceof DynamicFunctionSymbol )
{
processFunctionSymbol( (DynamicFunctionSymbol)member, gsClass );
}
else if( member instanceof DynamicPropertySymbol )
{
processPropertySymbol( (DynamicPropertySymbol)member, gsClass );
}
else
{
processVarStmt( gsClass, (VarStatement)member );
}
}
if( !gsClass.isInterface() )
{
if( !gsClass.ensureDefaultConstructor( getSymbolTable(), getOwner() ) )
{
getClassStatement().addParseException( new ParseException( makeFullParserState(),
Res.MSG_NO_DEFAULT_CTOR_IN,
gsClass.getSupertype().getName() ) );
}
}
boolean b = isInnerClass( gsClass ) || match( null, SourceCodeTokenizer.TT_EOF );
verify( getClassStatement(), b, Res.MSG_END_OF_STMT );
gsClass.addDelegateImpls( getSymbolTable(), this );
if( gsClass instanceof IGosuProgramInternal )
{
((IGosuProgramInternal)gsClass).addProgramEntryPoint( getSymbolTable(), this );
}
if( gsClass instanceof IGosuTemplateInternal )
{
((IGosuTemplateInternal)gsClass).addTemplateEntryPoints( getSymbolTable(), this );
}
gsClass.syncGenericAndParameterizedClasses();
gsClass.setDeclarationsCompiled();
if( parseDeclarationsOfLeftOverInnerClasses( gsClass ) )
{
gsClass.setInnerDeclarationsCompiled();
}
}
private void maybeForceRecursiveTypeToAssignSuperTypes( IGosuClassInternal gsClass )
{
if( gsClass.isParameterizedType() )
{
// If this is a recursive type, force super/interface assignment
gsClass.getSupertype();
gsClass.getInterfaces();
}
}
private boolean putClassMembersOfSuperAndInterfaces( IGosuClassInternal gsClass )
{
if( gsClass.isAnnotation() && JavaTypes.ANNOTATION().isAssignableFrom( gsClass ) )
{
// Don't try to put members of the implicitly extended java.lang.annotation.Annotation
return true;
}
ICompilableTypeInternal enclosingType = gsClass.getEnclosingType();
if( enclosingType instanceof IGosuClassInternal &&
((IGosuClassInternal)enclosingType).isHeaderCompiled() && TypeLord.encloses( enclosingType, getOwner().getGosuClass() ) )
{
enclosingType.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), gsClass.isStatic() );
}
for( IType type : gsClass.getInterfaces() )
{
if( !(type instanceof ErrorType) )
{
if( !putClassMembers( type ) )
{
return false;
}
}
}
return putClassMembers( gsClass.getSuperClass() );
}
private boolean putClassMembers( IType type )
{
IGosuClassInternal gsType = IGosuClassInternal.Util.getGosuClassFrom( type );
if( gsType != null )
{
gsType.compileDeclarationsIfNeeded();
if( !gsType.isDeclarationsCompiled() )
{
advanceToClassBodyEnd();
// Try again after enclosing class finishes
return false;
}
gsType.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), false );
}
return true;
}
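// Declaration-compiles any inner classes that were skipped during the body parse, iterating until
// no further progress is made; returns false if a dependency cycle leaves some inner classes
// uncompiled so they can be reparsed later.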
private boolean parseDeclarationsOfLeftOverInnerClasses( IGosuClassInternal gsClass )
{
int iCount = 0;
int iPriorCount;
Collection<? extends IGosuClass> innerClasses = gsClass.getKnownInnerClassesWithoutCompiling().values();
do
{
iPriorCount = iCount;
iCount = 0;
for( IGosuClass c : innerClasses )
{
IGosuClassInternal innerClass = (IGosuClassInternal)c;
if( !innerClass.isDeclarationsCompiled() || !innerClass.isInnerDeclarationsCompiled() )
{
if( innerClass.getSourceFileHandle() instanceof InnerClassFileSystemSourceFileHandle )
{
int state = getTokenizer().mark();
parseInnerClassDeclaration( innerClass );
getTokenizer().restoreToMark( state );
}
iCount += (innerClass.isDeclarationsCompiled() && innerClass.isInnerDeclarationsCompiled()) ? 0 : 1;
}
}
if( iPriorCount > 0 && iPriorCount == iCount )
{
// Could not decl parse one or more inner classes, must be a cycle; will reparse later
return false;
}
} while( iCount > 0 );
return true;
}
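// For enums, adds the implicit Code, DisplayName, Name, Ordinal, Value and AllValues properties
// plus the valueOf() and values() functions.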
private void addAutomaticEnumMethodsAndProperties( IGosuClassInternal gsClass )
{
if( gsClass.isEnum() )
{
addEnumProperty( gsClass, new EnumCodePropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumDisplayNamePropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumNamePropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumOrdinalPropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumValuePropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumAllValuesPropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
DynamicFunctionSymbol dfs = new EnumValueOfFunctionSymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() );
gsClass.getParseInfo().addMemberFunction( dfs );
getOwner().putDfsDeclInSetByName( dfs );
dfs = new EnumValuesFunctionSymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() );
gsClass.getParseInfo().addMemberFunction( dfs );
getOwner().putDfsDeclInSetByName( dfs );
}
}
private void addEnumProperty( IGosuClassInternal gsClass, DynamicPropertySymbol dps )
{
gsClass.getParseInfo().addMemberProperty( dps );
getOwner().putDfsDeclInSetByName( dps.getGetterDfs() ); // put in dfs map to prevent overriding by enum impl class
}
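// Scans the leading enum constant declarations (identifier with optional argument list and body),
// declaring each as a public static final field; rewinds the tokenizer if no constants are found.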
private void processEnumConstants( IGosuClassInternal gsClass )
{
boolean bEnum = gsClass != null && gsClass.isEnum();
if( !bEnum )
{
return;
}
Token t = new Token();
int state = getTokenizer().mark();
boolean bAtLeastOneConst = false;
boolean bConst;
do
{
bConst = false;
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( t, null, SourceCodeTokenizer.TT_WORD, true ) &&
!Keyword.isKeyword( t._strValue ) &&
match( t, SourceCodeTokenizer.TT_WORD ) )
{
VarStatement varStmt = parseEnumConstantDecl( t._strValue );
varStmt.setNameOffset( t.getTokenStart(), t._strValue );
setLocation( iOffset, iLineNum, iColumn );
popStatement();
processVarStmt( gsClass, varStmt );
bAtLeastOneConst = bConst = true;
}
if( match( null, ';' ) )
{
break;
}
} while( bConst && match( null, ',' ) );
if( !bAtLeastOneConst )
{
getTokenizer().restoreToMark( state );
}
}
private VarStatement parseEnumConstantDecl( String strIdentifier )
{
VarStatement varStmt = new VarStatement();
ModifierInfo modifiers = new ModifierInfo( Modifier.PUBLIC | Modifier.STATIC | Modifier.FINAL );
varStmt.setModifierInfo( modifiers );
verify( varStmt, getSymbolTable().getSymbol( strIdentifier ) == null, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
if( match( null, null, '(', true ) )
{
eatParenthesized( varStmt, Res.MSG_EXPECTING_RIGHTPAREN_FUNCTION_DEF );
if( match( null, null, '{', true ) )
{
eatStatementBlock( varStmt, Res.MSG_EXPECTING_RIGHTBRACE_STMTBLOCK );
}
}
IType type = getGosuClass();
varStmt.setScope( GlobalScope.EXECUTION );
AbstractDynamicSymbol symbol = new DynamicSymbol( getGosuClass(), getSymbolTable(), strIdentifier, type, null );
modifiers.addAll( symbol.getModifierInfo() );
symbol.setModifierInfo( modifiers );
varStmt.setSymbol( symbol );
varStmt.setEnumConstant( true );
getSymbolTable().putSymbol( symbol );
pushStatement( varStmt );
return varStmt;
}
private void processVarStmt( IGosuClassInternal gsClass, VarStatement varStmt )
{
gsClass.getParseInfo().addMemberField(varStmt);
}
public void processFunctionSymbol( DynamicFunctionSymbol dfs, IGosuClassInternal gsClass )
{
getSymbolTable().putSymbol( dfs );
if( dfs.getDisplayName().equals( gsClass.getRelativeName() ) )
{
gsClass.getParseInfo().addConstructorFunction(dfs);
}
else
{
gsClass.getParseInfo().addMemberFunction(dfs);
}
}
void processPropertySymbol( DynamicPropertySymbol dps, ICompilableTypeInternal gsClass )
{
getSymbolTable().putSymbol( dps );
dps.addMemberSymbols( gsClass );
}
private void addOuterMember( ICompilableTypeInternal gsClass )
{
while( gsClass instanceof IBlockClass )
{
// blocks should never be considered part of the outer hierarchy
gsClass = gsClass.getEnclosingType();
}
DynamicFunctionSymbol dfs = new OuterFunctionSymbol( getSymbolTable(), gsClass );
dfs.setClassMember( true );
DynamicPropertySymbol dps = getOrCreateDynamicPropertySymbol( getClassStatement(), gsClass, dfs, true );
processPropertySymbol( dps, gsClass );
}
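// Declaration-pass parsing of an enhancement body: checks for a duplicate enhancement, then
// collects each declared function and property into the enhancement's parse info.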
private void parseEnhancementBodyDecl( IGosuClassInternal gsClass )
{
try
{
IType type = TypeLoaderAccess.instance().getIntrinsicTypeByFullName( gsClass.getName() );
if( gsClass != type )
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_DUPLICATE_ENHANCEMENT_FOUND, type.getName() ) );
}
}
catch( ClassNotFoundException e )
{
// ignore
}
verify( getClassStatement(), match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CLASS_DEF );
for( Object result = parseFunctionDeclForEnhancement( gsClass );
result != null;
result = parseFunctionDeclForEnhancement( gsClass ) )
{
if( !result.equals( Boolean.FALSE ) )
{
popStatement();
if( result instanceof DynamicFunctionSymbol )
{
DynamicFunctionSymbol dfs = (DynamicFunctionSymbol)result;
getSymbolTable().putSymbol( dfs );
gsClass.getParseInfo().addMemberFunction(dfs);
}
else if( result instanceof DynamicPropertySymbol )
{
getSymbolTable().putSymbol( (DynamicPropertySymbol)result );
((DynamicPropertySymbol)result).addMemberSymbols( gsClass );
}
}
}
verify( getClassStatement(), isInnerClass( gsClass ) || match( null, SourceCodeTokenizer.TT_EOF ), Res.MSG_END_OF_STMT );
gsClass.syncGenericAndParameterizedClasses();
gsClass.setDeclarationsCompiled();
gsClass.setInnerDeclarationsCompiled();
}
public List<ParseException> resolveFunctionAndPropertyDecls( ISymbolTable table )
{
for( Object member = parseFunctionOrConstructorOrFieldDeclaration( null );
member != null; member = parseFunctionOrConstructorOrFieldDeclaration( null ) )
{
popStatement();
if( member instanceof DynamicFunctionSymbol )
{
table.putSymbol( (DynamicFunctionSymbol)member );
}
else if( member instanceof DynamicPropertySymbol )
{
table.putSymbol( (DynamicPropertySymbol)member );
}
}
pushStatement( getClassStatement() );
setLocation( _iClassOffset, _iClassLineNum, _iClassColumn );
popStatement();
//noinspection RedundantCast,unchecked
return (List<ParseException>)(List)getClassStatement().getParseExceptions();
}
private Object parseFunctionDeclForEnhancement( IGosuClassInternal gsClass )
{
int[] location = new int[3];
Object rtn = _parseFunctionDeclForEnhancement( gsClass, location );
if( rtn != null && !Boolean.FALSE.equals( rtn ) )
{
setLocation( location[0], location[1], location[2] );
}
return rtn;
}
private Object _parseFunctionDeclForEnhancement( IGosuClassInternal gsClass, int[] location )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
String strMemberKeyword[] = new String[1];
ModifierInfo modifiers = parseUntilMemberKeyword( strMemberKeyword, false, location );
if( modifiers.getModifiers() == -1 )
{
return null;
}
if( strMemberKeyword[0] != null && strMemberKeyword[0].equals( Keyword.KW_function.toString() ) )
{
FunctionStatement fs = new FunctionStatement();
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, false, false, modifiers );
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
verify( fs, !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, Keyword.KW_function );
if( dfs != null )
{
dfs.setClassMember( true );
}
if( verify( getClassStatement(), !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE ) )
{
if( !Modifier.isNative( modifiers.getModifiers() ) )
{
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
}
}
// verifyTypeVarVariance( Variance.COVARIANT, fs, false, dfs.getType() );
return dfs;
}
else if( strMemberKeyword[0] != null && strMemberKeyword[0].equals( Keyword.KW_property.toString() ) )
{
boolean bGetter = match( null, Keyword.KW_get );
verify( getClassStatement(), bGetter || match( null, Keyword.KW_set ), Res.MSG_EXPECTING_PROPERTY_GET_OR_SET_MODIFIER );
FunctionStatement fs = new FunctionStatement();
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, true, bGetter, modifiers );
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
setLocation( iOffset, iLineNum, iColumn );
popStatement();
verify( fs, !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, Keyword.KW_function );
if( dfs != null )
{
dfs.setClassMember( true );
}
if( verify( getClassStatement(), !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE ) )
{
if( !Modifier.isNative( modifiers.getModifiers() ) )
{
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
}
}
DynamicPropertySymbol dps = dfs == null ? null : getOrCreateDynamicPropertySymbol( getClassStatement(), gsClass, dfs, bGetter );
PropertyStatement statement = new PropertyStatement( fs, dps );
verifyPropertiesAreSymmetric( bGetter, dfs, dps, statement );
pushStatement( statement );
// if( bGetter )
// {
// verifyTypeVarVariance( Variance.COVARIANT, fs, false, dps.getGetterDfs().getReturnType() );
// }
// else if( dps.getSetterDfs().getArgTypes().length > 0 )
// {
// verifyTypeVarVariance( Variance.CONTRAVARIANT, fs, false, dps.getSetterDfs().getArgTypes()[0] );
// }
return dps;
}
else if( strMemberKeyword[0] != null && strMemberKeyword[0].equals( Keyword.KW_var.toString() ) )
{
return Boolean.FALSE;
}
return null;
}
private void parseClassStatementAsEnhancement( IGosuClassInternal gsClass )
{
//## todo: remove this scope?
IGosuEnhancementInternal enhancement = (IGosuEnhancementInternal)gsClass;
getSymbolTable().pushScope();
try
{
verify( getClassStatement(), match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CLASS_DEF );
parseClassMembers( gsClass );
for( Statement stmt = peekStatement(); stmt != null; stmt = peekStatement() )
{
stmt = popStatement();
IType enhancedType = enhancement.getEnhancedType();
if( stmt instanceof FunctionStatement )
{
FunctionStatement func = (FunctionStatement)stmt;
if( func.getDynamicFunctionSymbol() != null && !(enhancedType instanceof ErrorType) )
{
ITypeInfo typeInfo = enhancedType.getTypeInfo();
if( typeInfo != null )
{
IMethodInfo mi = typeInfo instanceof IRelativeTypeInfo
? ((IRelativeTypeInfo)typeInfo).getMethod( enhancement, func.getFunctionName(), func.getDynamicFunctionSymbol().getArgTypes() )
: typeInfo.getMethod( func.getFunctionName(), func.getDynamicFunctionSymbol().getArgTypes() );
if( overridesMethodWithDefaultParams(func, typeInfo) )
{
addDeclaredNameParseError( func, Res.MSG_OVERLOADING_NOT_ALLOWED_WITH_OPTIONAL_PARAMS, mi.getDisplayName(), enhancedType.getRelativeName() );
}
else if( (mi != null) && (!featureIsOwnedByEnhancement( enhancement, mi ) || (enhancedType != JavaTypes.OBJECT() && GosuClass.isObjectMethod( mi ))) )
{
addDeclaredNameParseError( func, Res.MSG_CANNOT_OVERRIDE_FUNCTIONS_IN_ENHANCEMENTS, mi.getDisplayName(), enhancedType.getRelativeName() );
}
else if( enhancedType instanceof IGosuClass )
{
String name = func.getFunctionName();
DynamicFunctionSymbol dfs = func.getDynamicFunctionSymbol();
if( name.startsWith( "set" ) && dfs.getArgs().size() == 1 )
{
ITypeInfo ti = enhancedType.getTypeInfo();
IPropertyInfo pi = ((IRelativeTypeInfo)ti).getProperty( enhancement, name.substring( 3, name.length() ) );
if( pi instanceof GosuPropertyInfo )
{
ReducedDynamicPropertySymbol dps = ((GosuPropertyInfo)pi).getDps();
if( dps.getSetterDfs() != null )
{
IType argType = dfs.getArgs().get( 0 ).getType();
if( argType.equals( dps.getType() ) )
{
addDeclaredNameParseError( func, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, dfs.getName(), dps.getName() );
}
else if( getOwner().doTypesReifyToTheSameBytecodeType( argType, dps.getType() ) )
{
addDeclaredNameParseError( func, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT_UPON_REIFICATION, dfs.getName(), dps.getName() );
}
}
}
}
else if( (name.startsWith( "get" ) || name.startsWith( "is" )) && dfs.getArgs().size() == 0 )
{
ITypeInfo ti = enhancedType.getTypeInfo();
IPropertyInfo pi = ((IRelativeTypeInfo)ti).getProperty( enhancement, name.substring( name.startsWith( "get" ) ? 3 : 2, name.length() ) );
if( pi instanceof GosuPropertyInfo )
{
ReducedDynamicPropertySymbol dps = ((GosuPropertyInfo)pi).getDps();
if( dps.getGetterDfs() != null )
{
addDeclaredNameParseError( func, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, dfs.getName(), dps.getName() );
}
}
}
}
}
}
}
else if( stmt instanceof PropertyStatement )
{
PropertyStatement prop = (PropertyStatement)stmt;
ITypeInfo typeInfo = enhancedType.getTypeInfo();
if( typeInfo != null && !(enhancedType instanceof ErrorType) )
{
IPropertyInfo pi = typeInfo instanceof IRelativeTypeInfo
? ((IRelativeTypeInfo)typeInfo).getProperty( enhancement, prop.getFunctionName() )
: typeInfo.getProperty( prop.getFunctionName() );
if( pi != null && !featureIsOwnedByEnhancement( enhancement, pi ) )
{
addDeclaredNameParseError( prop, Res.MSG_CANNOT_OVERRIDE_PROPERTIES_IN_ENHANCEMENTS, pi.getDisplayName(), enhancedType.getRelativeName() );
}
else
{
FunctionStatement funcStmt = prop.getPropertyGetterOrSetter();
DynamicFunctionSymbol dfs = funcStmt.getDynamicFunctionSymbol();
String name = dfs.getDisplayName().substring( 1 );
if( dfs.getArgs().size() == 0 )
{
ITypeInfo ti = enhancedType.getTypeInfo();
IMethodInfo mi = ((IRelativeTypeInfo)ti).getMethod( enhancement, "get" + name );
mi = mi == null ? ((IRelativeTypeInfo)ti).getMethod( enhancement, "is" + name ) : mi;
if( mi != null )
{
addDeclaredNameParseError( prop, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, mi.getName(), name );
}
}
else if( funcStmt.getParameters().size() > 0 )
{
ITypeInfo ti = enhancedType.getTypeInfo();
for( IMethodInfo mi: ((IRelativeTypeInfo)ti).getMethods( enhancement ) )
{
if( mi.getDisplayName().equals( "set" + name ) && mi.getParameters().length == 1 )
{
IType argType = mi.getParameters()[0].getFeatureType();
if( argType.equals( dfs.getArgTypes()[0] ) )
{
addDeclaredNameParseError( prop, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, mi.getName(), dfs.getName() );
}
else if( getOwner().doTypesReifyToTheSameBytecodeType( argType, dfs.getArgTypes()[0] ) )
{
addDeclaredNameParseError( prop, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, mi.getName(), dfs.getName() );
}
}
}
}
}
}
}
else if( !(stmt instanceof NoOpStatement ||
stmt instanceof NamespaceStatement ||
stmt instanceof UsesStatement) )
{
ParseException parseException = new ParseException( stmt.getLineNum(), 1, stmt.getLocation().getColumn(), stmt.getLocation().getOffset(), stmt.getLocation().getExtent(),
getSymbolTable(), Res.MSG_ENHANCEMENT_DOES_NOT_ACCEPT_THIS_STATEMENT );
stmt.addParseException( parseException );
}
}
verify( getClassStatement(), match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_CLASS_DEF );
}
finally
{
getSymbolTable().popScope();
}
}
void addDeclaredNameParseError( IParsedElementWithAtLeastOneDeclaration stmt, ResourceKey key, Object... args )
{
int nameOffset = stmt.getNameOffset( null );
ParseException parseException = new ParseException( stmt.getLineNum(), 1, stmt.getLocation().getColumn(), nameOffset, nameOffset + ((stmt instanceof VarStatement) ? ((VarStatement)stmt).getIdentifierName().length() : stmt.getFunctionName().length()),
getSymbolTable(), key, args );
stmt.addParseException( parseException );
}
private boolean overridesMethodWithDefaultParams(FunctionStatement func, ITypeInfo typeInfo) {
if( !(typeInfo instanceof IRelativeTypeInfo) )
{
return false;
}
IRelativeTypeInfo rti = (IRelativeTypeInfo) typeInfo;
for( IMethodInfo mi : rti.getMethods( func.getGosuClass() ) )
{
if( mi.getDisplayName().equals( func.getFunctionName() ) && mi instanceof GosuMethodInfo && !featureIsOwnedByEnhancement( func.getGosuClass(), mi ) )
{
final ReducedDynamicFunctionSymbol dfs0 = ((GosuMethodInfo) mi).getDfs();
final DynamicFunctionSymbol dfs1 = func.getDynamicFunctionSymbol();
return dfs0 != null && dfs1 != null && (((IInvocableType) dfs0.getType()).hasOptionalParams() || dfs1.hasOptionalParameters());
}
}
return false;
}
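// True when the feature's owning type (in generic form) is the enhancement itself, i.e. the
// member was declared by this enhancement rather than by the enhanced type or another enhancement.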
private boolean featureIsOwnedByEnhancement( IGosuClass enhancement, IFeatureInfo iMethodInfo )
{
if( !(enhancement instanceof IGosuEnhancementInternal) )
{
return false;
}
IType ownerType = iMethodInfo.getOwnersType();
if( ownerType != null && ownerType.isParameterizedType() )
{
ownerType = ownerType.getGenericType();
}
IType enhancementType = enhancement;
if( enhancementType != null && enhancementType.isParameterizedType() )
{
enhancementType = enhancementType.getGenericType();
}
if( enhancementType instanceof IGosuEnhancementInternal &&
ownerType instanceof IGosuEnhancementInternal )
{
return GosuObjectUtil.equals( enhancementType.getName(), ownerType.getName() );
}
else
{
return GosuObjectUtil.equals( enhancementType, ownerType );
}
}
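/**
 * Header-pass parsing: positions the tokenizer at the class start, handles package/uses
 * statements (or their program/eval equivalents), parses the class type, modifiers, name, type
 * variables and extends/implements clauses, and then header-parses known inner classes.
 * Returns the parsed class name, or null when the header cannot be parsed.
 */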
String parseHeader( IGosuClassInternal gsClass, boolean bParseEnhancementOnly, boolean bIsAnonymous, boolean bResolveUsesTypes )
{
boolean bPushedScope = pushScopeIfNeeded( gsClass );
if( gsClass.isHeaderCompiled() )
{
((CompilationState)gsClass.getCompilationState()).setReparsingHeader( true );
}
else
{
gsClass.setCompilingHeader( true );
}
getTokenizer().pushOffsetMarker( this );
gsClass.createNewParseInfo();
setClassStatement( gsClass.getParseInfo().getClassStatement() );
ScriptPartId scriptPartId = new ScriptPartId( gsClass, null );
getOwner().pushScriptPart( scriptPartId );
GosuClassCompilingStack.pushCompilingType( gsClass );
try
{
setTokenizerToClassStart();
if( match( null, SourceCodeTokenizer.TT_EOF ) )
{
if( gsClass instanceof IGosuProgram )
{
// Let empty *program* source parse
//## todo: cache and reuse empty program class
gsClass.setSuperType( JavaTypes.OBJECT() );
}
else if( getClassStatement() != null && getClassStatement().getClassFileStatement() != null )
{
getClassStatement().getClassFileStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_NO_SOURCE_FOUND ) );
}
return null;
}
getOwner().checkInstruction( true );
if( gsClass instanceof IGosuProgram )
{
getOwner().parseProgramClasspathStatements();
getOwner().parseProgramTypeLoaderStatements();
}
getOwner().checkInstruction( true );
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( null, Keyword.KW_package ) )
{
getOwner().parseNamespaceStatement();
setLocation( iOffset, iLineNum, iColumn );
popStatement();
}
else if( gsClass instanceof IGosuProgram )
{
ISourceFileHandle sfh = gsClass.getSourceFileHandle();
boolean bEval = sfh instanceof StringSourceFileHandle;
if( bEval )
{
ITypeUsesMap typeUsesMap = ((StringSourceFileHandle)sfh).getTypeUsesMap();
if( typeUsesMap != null )
{
getOwner().setTypeUsesMap( typeUsesMap );
}
}
if( gsClass.isAnonymous() )
{
// Anonymous implies Eval program...
gsClass.setEnclosingType( TypeSystem.getByFullNameIfValid( sfh.getParentType() ) );
IType enclosingType = gsClass.getEnclosingTypeReference();
getOwner().setNamespace(enclosingType.getNamespace());
Map<String, ITypeVariableDefinition> capturedTypeVars = bEval ? ((StringSourceFileHandle)sfh).getCapturedTypeVars() : null;
if( capturedTypeVars != null )
{
getOwner().getTypeVariables().putAll( capturedTypeVars );
}
}
else
{
String strNamespace = getGosuClass().getNamespace();
getOwner().setNamespace( strNamespace != null && !strNamespace.isEmpty() ? strNamespace : IGosuProgram.PACKAGE );
}
}
else if( !isInnerClass( gsClass ) )
{
getOwner().setNamespace( "" );
}
getOwner().checkInstruction( true );
getOwner().parseUsesStatementList( bResolveUsesTypes );
if( gsClass.getEnclosingType() == null )
{
// Only the top-level class stores the uses map; inner classes start parsing right at the
// class-stmt, so they get the uses map from their top-level enclosing class
gsClass.setTypeUsesMap(getOwner().getTypeUsesMap());
}
ClassType classType;
if( gsClass.isAnonymous() && !(gsClass instanceof IGosuProgram) )
{
try
{
classType = parseAnonymousClassHeader( gsClass );
}
catch( InnerClassNotFoundException e )
{
classType = ClassType.Class;
}
_iClassOffset = getTokenizer().getTokenStart();
_iClassLineNum = getTokenizer().getLineNumber();
_iClassColumn = getTokenizer().getTokenColumn();
}
else if( gsClass instanceof IGosuProgram )
{
gsClass.setModifierInfo(new ModifierInfo(Modifier.PUBLIC | Modifier.FINAL));
if( gsClass.isAnonymous() ) // generated 'eval' program
{
final IParsedElement enclosingEvalExpression = ((IGosuProgram) gsClass).getEnclosingEvalExpression();
IParseTree parseTree = enclosingEvalExpression == null ? null : enclosingEvalExpression.getLocation();
IFunctionStatement fs = (parseTree == null ? null : parseTree.getEnclosingFunctionStatement());
if( (fs != null && fs.getDynamicFunctionSymbol().isStatic()) ||
// Note a null enclosingEvalExpression implies this anon program is a bare expression that is
// artificially executed as though it were defined somewhere within the enclosing class (e.g.,
// an old-style Gosu annotation); therefore the expression needs private access to the outer
// class and must be compiled as a static, yet anonymous, inner class
enclosingEvalExpression == null )
{
((ModifierInfo)gsClass.getModifierInfo()).addModifiers( Modifier.STATIC );
}
}
// Optional 'extends' clause for specifying Super Class for a program
parseProgramExtendsStatement( gsClass, bResolveUsesTypes );
classType = ClassType.Class;
}
else
{
getOwner().checkInstruction( true );
_iClassOffset = getTokenizer().getTokenStart();
_iClassLineNum = getTokenizer().getLineNumber();
_iClassColumn = getTokenizer().getTokenColumn();
if( !bIsAnonymous )
{
classType = parseClassType( gsClass, true );
if( classType == ClassType.Interface || classType == ClassType.Structure || classType == ClassType.Annotation )
{
((ModifierInfo)gsClass.getModifierInfo()).addModifiers( Modifier.ABSTRACT );
}
else if( classType == ClassType.Enum )
{
((ModifierInfo)gsClass.getModifierInfo()).addModifiers( Modifier.FINAL );
}
if( classType == ClassType.Annotation )
{
gsClass.addInterface( JavaTypes.ANNOTATION() );
}
}
else
{
classType = parseClassTypeForHeader( gsClass );
}
}
if( classType == null )
{
if( bParseEnhancementOnly )
{
return null;
}
verify( getClassStatement(), false, Res.MSG_EXPECTING_NAME_CLASS_DEF );
}
if( classType == ClassType.Enhancement )
{
if( gsClass instanceof IGosuEnhancementInternal )
{
IGosuEnhancementInternal scriptEnhancement = (IGosuEnhancementInternal)gsClass;
scriptEnhancement.setFoundCorrectHeader();
return parseEnhancementHeaderSuffix( scriptEnhancement );
}
else
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_MUST_BE_DEFINED_AS_CLASS ) );
return null;
}
}
else if( classType != null && !bParseEnhancementOnly )
{
if( classType == ClassType.Enum )
{
gsClass.setEnum();
}
return parseClassOrInterfaceHeaderSuffix( gsClass, classType, bResolveUsesTypes );
}
else
{
return null;
}
}
finally
{
boolean bHeaderCompiled;
try
{
bHeaderCompiled = gsClass.isHeaderCompiled();
if( !bHeaderCompiled )
{
parseInnerClassHeaders( gsClass, bResolveUsesTypes );
}
}
finally
{
GosuClassCompilingStack.popCompilingType();
}
getOwner().popScriptPart( scriptPartId );
((CompilationState)gsClass.getCompilationState()).setReparsingHeader( false );
gsClass.setCompilingHeader( false );
gsClass.setHeaderCompiled();
popScopeIfNeeded( bPushedScope, gsClass );
getTokenizer().popOffsetMarker( this );
if( !bHeaderCompiled )
{
removeTypeVarsFromParserMap( gsClass );
}
}
}
private void removeTypeVarsFromParserMap( IGosuClassInternal gsClass )
{
for( IGenericTypeVariable gtv : gsClass.getGenericTypeVariables() )
{
ITypeVariableDefinition typeVarDef = gtv.getTypeVariableDefinition();
Map<String, ITypeVariableDefinition> typeVarMap = getOwner().getTypeVariables();
if( typeVarMap.containsValue( typeVarDef ) )
{
typeVarMap.remove( typeVarDef.getName() );
}
}
}
private boolean pushScopeIfNeeded( final IGosuClassInternal gsClass )
{
ISymbolTable compilingClass = CompiledGosuClassSymbolTable.instance().getSymbolTableForCompilingClass( gsClass );
if( compilingClass != null )
{
return false;
}
// *barf*
if( gsClass.getParser() != null )
{
CompiledGosuClassSymbolTable.instance().pushCompileTimeSymbolTable( gsClass, gsClass.getParser().getSymbolTable() );
}
else
{
CompiledGosuClassSymbolTable.instance().pushCompileTimeSymbolTable( gsClass );
}
getSymbolTable().pushIsolatedScope(new GosuClassTransparentActivationContext(gsClass, false));
return true;
}
private void popScopeIfNeeded( boolean bPop, IGosuClass gsClass )
{
if( bPop )
{
getSymbolTable().popScope();
CompiledGosuClassSymbolTable.instance().popCompileTimeSymbolTable( gsClass );
}
}
private void setTokenizerToClassStart()
{
if( isInnerClass( getGosuClass() ) )
{
getTokenizer().reset();
}
if( !getTokenizer().isPositioned() )
{
getTokenizer().nextToken();
}
}
private ClassType parseAnonymousClassHeader( IGosuClassInternal gsClass )
{
ClassType classType = ClassType.Class;
ParsedElement elem;
if( match( null, null, '(', true ) )
{
elem = getClassStatement();
}
else if( !getOwner().parseTypeLiteral() )
{
throw new InnerClassNotFoundException();
}
else
{
elem = popExpression();
}
eatParenthesized( elem, Res.MSG_EXPECTING_FUNCTION_CLOSE );
//getLocationsList().remove( superTypeLiteral.getLocation() ); // rely on the new-expr to keep the type literal *it* parses
return classType;
}
private boolean goToPosition( int iOffset )
{
try
{
getTokenizer().goToPosition( iOffset );
return true;
}
catch( IOException e )
{
//noinspection ThrowableResultOfMethodCallIgnored
getClassStatement().addParseException( ParseException.wrap( e, makeFullParserState() ) );
}
return false;
}
private ClassType parseClassTypeForHeader( IGosuClassInternal gsClass )
{
while( true )
{
if( match( null, SourceCodeTokenizer.TT_EOF ) )
{
return null;
}
ClassType classType = parseClassType( gsClass, !gsClass.isDeclarationsCompiled() );
if( classType != null )
{
return classType;
}
getTokenizer().nextToken();
}
}
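// Parses the class modifiers and the class/interface/structure/annotation/enum/enhancement
// keyword, verifying which modifiers are legal for each kind of type; returns null if no
// class-defining keyword is found.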
private ClassType parseClassType( IGosuClassInternal gsClass, boolean bSetModifiers )
{
ModifierInfo modifiers = parseModifiersForClass( gsClass, bSetModifiers );
if( !Modifier.isInternal( modifiers.getModifiers() )
&& !Modifier.isProtected( modifiers.getModifiers() )
&& !Modifier.isPrivate( modifiers.getModifiers() ) )
{
modifiers.addModifiers( Modifier.PUBLIC );
}
ClassType classType = null;
if( match( null, Keyword.KW_enhancement ) )
{
classType = ClassType.Enhancement;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isPrivate( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_private, classType.name() );
verify( getClassStatement(), !Modifier.isProtected( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_protected, classType.name() );
verify( getClassStatement(), !Modifier.isInternal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_internal, classType.name() );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_abstract, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
verifyNoAbstractHideOverrideStaticModifierDefined( getClassStatement(), false, modifiers.getModifiers(), Keyword.KW_enhancement );
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_interface ) )
{
classType = ClassType.Interface;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isHide( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_hide, classType.name() );
verify( getClassStatement(), !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, classType.name() );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
if( gsClass.getEnclosingType() != null )
{
modifiers.addModifiers( Modifier.STATIC );
}
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_structure ) )
{
classType = ClassType.Structure;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isHide( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_hide, classType.name() );
verify( getClassStatement(), !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, classType.name() );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
if( gsClass.getEnclosingType() != null )
{
modifiers.addModifiers( Modifier.STATIC );
}
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_annotation ) )
{
classType = ClassType.Annotation;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isHide( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_hide, classType.name() );
verify( getClassStatement(), !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, classType.name() );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
modifiers.addModifiers( Modifier.ANNOTATION );
if( gsClass.getEnclosingType() != null )
{
modifiers.addModifiers( Modifier.STATIC );
}
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_class ) )
{
classType = ClassType.Class;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isHide( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_hide, classType.name() );
verify( getClassStatement(), !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_enum ) )
{
classType = ClassType.Enum;
if( bSetModifiers )
{
verifyNoAbstractHideOverrideModifierDefined( getClassStatement(), false, modifiers.getModifiers(), Keyword.KW_final );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
gsClass.setModifierInfo(modifiers);
}
}
if( gsClass.shouldFullyCompileAnnotations() )
{
verifyModifiers( getClassStatement(), modifiers, UsageTarget.TypeTarget );
}
gsClass.setFullDescription( modifiers.getDescription() );
if( bSetModifiers && classType != null && gsClass.getEnclosingType() == null )
{
verify( getClassStatement(), !Modifier.isPrivate( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_private, classType.name() );
verify( getClassStatement(), !Modifier.isProtected( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_protected, classType.name() );
verify( getClassStatement(), !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_static, classType.name() );
}
return classType;
}
private ModifierInfo parseModifiersForClass( IGosuClassInternal gsClass, boolean bSetModifiers )
{
ModifierInfo modifiers;
ICompilableTypeInternal enclosingType = gsClass.getEnclosingType();
if( enclosingType instanceof IGosuClassInternal && ((IGosuClassInternal)enclosingType).isDeclarationsCompiled() )
{
// push static class symbols for annotations (they are part of modifier parsing)
ClassScopeCache scopeCache = makeClassScopeCache( (IGosuClassInternal)enclosingType );
pushClassSymbols( true, scopeCache );
try
{
modifiers = parseModifiers( !bSetModifiers );
}
finally
{
popClassSymbols();
}
}
else
{
modifiers = parseModifiers( !bSetModifiers );
}
return modifiers;
}
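/**
 * Parses the remainder of a class/interface/enum header after the defining keyword: the class
 * name, type variable definitions, and extends/implements clauses; wires up the IGosuObject
 * interface where required and, for uncompiled top-level headers, loads all nested inner classes.
 */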
private String parseClassOrInterfaceHeaderSuffix( IGosuClassInternal gsClass, ClassType classType, boolean bResolveTypes )
{
String strClassName;
IGosuClassInternal gosuObjectInterface = getGosuObjectInterface();
if (gosuObjectInterface == null) {
return gsClass.getName();
}
if( gsClass instanceof IGosuProgram )
{
gsClass.addInterface(gosuObjectInterface);
if( !gsClass.isAnonymous() )
{
IType type = parseEnhancedOrImplementedType( gsClass, false, Collections.<IType>emptyList() );
gsClass.setSuperType( type );
}
strClassName = gsClass.getName();
}
else if( gsClass.isAnonymous() )
{
gsClass.addInterface(gosuObjectInterface);
strClassName = gsClass.getName();
if( gsClass.isHeaderCompiled() )
{
ClassDeclaration classDeclaration = new ClassDeclaration( strClassName );
pushExpression( classDeclaration );
SourceCodeTokenizer tokenizer = getOwner().getTokenizer();
setLocation( tokenizer.getTokenStart(), tokenizer.getLineNumber(), tokenizer.getTokenColumn(), true, true );
popExpression();
getClassStatement().setClassDeclaration( classDeclaration );
// makeSyntheticClassDeclaration( strClassName, false );
}
}
else
{
boolean bStructure = classType.equals( ClassType.Structure );
boolean bAnnotation = classType.equals( ClassType.Annotation );
boolean bInterface = bStructure || bAnnotation || classType.equals( ClassType.Interface );
gsClass.setInterface( bInterface );
gsClass.setStructure( bStructure );
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
Token t = new Token();
verify( getClassStatement(), match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF );
strClassName = t._strValue;
String strNamespace;
if( isTopLevelClass( getGosuClass() ) )
{
strNamespace = getOwner().getNamespace();
}
else
{
strNamespace = getGosuClass().getEnclosingType().getName();
}
strClassName = GosuStringUtil.isEmpty(strNamespace)
? strClassName
: strNamespace + '.' + strClassName;
if( gsClass.getEnclosingTypeReference() == null && strClassName != null && !strClassName.equals( gsClass.getName() ) )
{
verify( getClassStatement(), false, Res.MSG_WRONG_CLASSNAME, strClassName, gsClass.getName() );
}
if( strClassName != null && gsClass.isHeaderCompiled() )
{
ClassDeclaration classDeclaration = new ClassDeclaration( strClassName );
pushExpression( classDeclaration );
setLocation( iOffset, iLineNum, iColumn );
popExpression();
getClassStatement().setClassDeclaration( classDeclaration );
}
List<ITypeVariableDefinitionExpression> typeVarLiteralList = getOwner().parseTypeVariableDefs( getClassStatement(), false, getDeclTypeVars() );
gsClass.setGenericTypeVariables((List)typeVarLiteralList);
if( gsClass.isEnum() )
{
verify( getClassStatement(), typeVarLiteralList.isEmpty(), Res.MSG_ENUM_MAY_NOT_HAVE_TYPEPARAM );
}
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getTokenColumn();
if( !bInterface && (match( null, Keyword.KW_extends ) || gsClass.isEnum()) )
{
IType superType = parseEnhancedOrImplementedType( gsClass, true, Collections.<IType>emptyList() );
if( superType instanceof IGosuClassInternal )
{
if( bResolveTypes )
{
((IGosuClassInternal)superType).compileDeclarationsIfNeeded();
}
}
gsClass.setSuperType( superType );
if( gsClass.getCompilationState().isCompilingDeclarations() &&
gsClass.isGenericType() )
{
verify( getClassStatement(), !JavaTypes.THROWABLE().isAssignableFrom( superType ) , Res.MSG_INVALID_GENERIC_EXCEPTION );
}
SuperTypeClause extendsClause = new SuperTypeClause( superType );
pushExpression( extendsClause );
if( gsClass.isDeclarationsCompiled() )
{
verifySuperTypeVarVariance( getClassStatement(), superType );
}
setLocation( iOffset, iLineNum, iColumn );
popExpression();
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getTokenColumn();
}
boolean hasImplements = false;
if( (bInterface && match( null, Keyword.KW_extends )) ||
(hasImplements = match( null, Keyword.KW_implements )) )
{
if( verify( getClassStatement(), !bInterface || !hasImplements, Res.MSG_NO_IMPLEMENTS_ALLOWED ) )
{
verify( getClassStatement(), !bAnnotation, Res.MSG_NO_EXTENDS_ALLOWED );
}
List<IType> interfaces = new ArrayList<IType>();
do
{
IType type = parseEnhancedOrImplementedType( gsClass, bInterface, interfaces );
gsClass.addInterface( type );
if( gsClass.isDeclarationsCompiled() )
{
verifySuperTypeVarVariance( getClassStatement(), type );
}
interfaces.add( type );
} while( match( null, ',' ) );
InterfacesClause interfacesClause = new InterfacesClause( gsClass, interfaces.toArray( new IType[interfaces.size()] ) );
pushExpression( interfacesClause );
setLocation( iOffset, iLineNum, iColumn );
popExpression();
}
if( classType == ClassType.Class || classType == ClassType.Interface || classType == ClassType.Structure )
{
IGosuClassInternal gsObjectInterface = gosuObjectInterface;
if( (!gsClass.isInterface() || !interfaceExtendsGosuObject( gsClass, gsObjectInterface )) && !gsClass.getName().startsWith( IGosuClass.PROXY_PREFIX ) )
{
gsClass.addInterface( gsObjectInterface );
}
}
else if( classType == ClassType.Enum )
{
gsClass.addInterface(gosuObjectInterface);
}
}
if( (isTopLevelClass( gsClass ) ||
gsClass instanceof IGosuProgram ||
// Anonymous classes can have inner classes
gsClass.isAnonymous()) &&
!gsClass.isHeaderCompiled() )
{
// Recursively *load* (no parsing) all nested inner types from the top-level class file
int state = getTokenizer().mark();
loadAllNestedInnerClasses( gsClass );
getTokenizer().restoreToMark( state );
}
return strClassName;
}
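// Returns true if the given interface is the IGosuObject interface itself, or if any of its
// directly declared super-interfaces is a Gosu class (and therefore already carries IGosuObject).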
private boolean interfaceExtendsGosuObject( IGosuClassInternal gsClass, IGosuClassInternal gsObjectInterface )
{
if( gsClass == gsObjectInterface )
{
return true;
}
for( IType iface: gsClass.getInterfaces() )
{
if( iface instanceof IGosuClass )
{
return true;
}
}
return false;
}
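// Returns the type variable definitions already resolved during declaration compilation of this
// class, or an empty list if declarations have not been compiled or the class has no type variables.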
private List<TypeVariableDefinitionImpl> getDeclTypeVars()
{
IGosuClass gsClass = getGosuClass();
if( !gsClass.isDeclarationsCompiled() )
{
return Collections.emptyList();
}
IGenericTypeVariable[] typeVars = gsClass.getGenericTypeVariables();
if( typeVars == null )
{
return Collections.emptyList();
}
List<TypeVariableDefinitionImpl> result = new ArrayList<TypeVariableDefinitionImpl>( typeVars.length );
for( IGenericTypeVariable typeVar : typeVars )
{
result.add( (TypeVariableDefinitionImpl)typeVar.getTypeVariableDefinition() );
}
return result;
}
private void makeSyntheticClassDeclaration( String strClassName, boolean bProgram )
{
ClassDeclaration classDeclaration = new ClassDeclaration( strClassName );
pushExpression( classDeclaration );
SourceCodeTokenizer tokenizer = getOwner().getTokenizer();
setLocation( bProgram ? 0 : tokenizer.getTokenStart(), tokenizer.getLineNumber(), bProgram ? 0 : tokenizer.getTokenColumn(), true, true );
popExpression();
getClassStatement().setClassDeclaration( classDeclaration );
}
private void parseInnerClassHeaders( IGosuClassInternal gsClass, boolean bResolveTypes )
{
Map<CharSequence, ? extends IGosuClass> innerClassesByName = gsClass.getKnownInnerClassesWithoutCompiling();
if( innerClassesByName.isEmpty() )
{
return;
}
int state = getTokenizer().mark();
int iLocationsSize = getLocationsList().size();
try
{
for( CharSequence name : innerClassesByName.keySet() )
{
IGosuClassInternal innerClass = (IGosuClassInternal)innerClassesByName.get( name );
if( !(innerClass instanceof IBlockClass) )
{
innerClass.createNewParseInfo();
new GosuClassParser( getOwner(), innerClass ).parseHeader( innerClass, false, false, bResolveTypes );
}
}
}
finally
{
while( getLocationsList().size() > iLocationsSize )
{
getLocationsList().remove( getLocationsList().size()-1 );
}
getTokenizer().restoreToMark( state );
}
}
private void loadAllNestedInnerClasses( IGosuClassInternal gsClass )
{
String[] strMemberKeyword = new String[1];
if( !(gsClass instanceof IGosuProgram) )
{
advanceToClassBodyStart();
}
ModifierInfo modifiers;
while( true )
{
int[] location = new int[3];
int[] mark = new int[]{-1};
modifiers = parseUntilMemberKeyword( strMemberKeyword, true, -1, location, mark );
if( modifiers.getModifiers() == -1 )
{
if( getTokenizer().isEOF() )
{
break;
}
if( !isInnerClassesEmpty() ) // can be empty, e.g., when errors leave mismatched braces
{
IGosuClassInternal innerClass = popInnerClass( getCurrentInnerClass() );
innerClass.getSourceFileHandle().setEnd( location[0] );
}
else if( gsClass.isAnonymous() )
{
break;
}
}
else
{
ClassType classType = getClassType( strMemberKeyword[0] );
if( classType != null )
{
IGosuClassInternal innerClass = loadNextInnerClass( gsClass, classType );
if( innerClass == null )
{
break;
}
innerClass.getSourceFileHandle().setOffset( location[0] );
((InnerClassFileSystemSourceFileHandle)innerClass.getSourceFileHandle()).setMark( mark[0] );
pushInnerClass( innerClass );
}
}
}
}
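// Maps a member keyword (class, interface, annotation, structure, enum) to its ClassType;
// returns null if the string is not a class-declaring keyword.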
private ClassType getClassType( String strValue )
{
return
Keyword.KW_class.toString().equals( strValue )
? ClassType.Class
: Keyword.KW_interface.equals( strValue )
? ClassType.Interface
: Keyword.KW_annotation.equals( strValue )
? ClassType.Annotation
: Keyword.KW_structure.equals( strValue )
? ClassType.Structure
: Keyword.KW_enum.toString().equals( strValue )
? ClassType.Enum
: null;
}
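// Consumes tokens until the opening brace of the class body (or EOF) is matched.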
private void advanceToClassBodyStart()
{
while( true )
{
if( match( null, '{' ) )
{
break;
}
if( match( null, SourceCodeTokenizer.TT_EOF ) )
{
break;
}
getTokenizer().nextToken();
}
}
private void advanceToClassBodyEnd()
{
int iEnd = getGosuClass().getSourceFileHandle().getEnd();
if( iEnd <= 0 )
{
//assert isTopLevelClass( getGosuClass() ) || isEvalClass() : "Inner class does not have an 'end' marker";
return;
}
try
{
//## perf: this is very slow, maybe use a tokenizer mark instead
getTokenizer().goToPosition( iEnd );
}
catch( IOException e )
{
throw new RuntimeException( e );
}
verify( getClassStatement(), match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_CLASS_DEF );
}
private IGosuClassInternal loadNextInnerClass( IGosuClassInternal gsClass, ClassType classType )
{
Token T = new Token();
IGosuClassInternal enclosingGsClass = getGosuClass();
if( verify( getClassStatement(), match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF ) )
{
enclosingGsClass = getCurrentInnerClass() != null ? getCurrentInnerClass() : enclosingGsClass;
String strInnerClass = T._strValue;
IGosuClassInternal innerGsClass;
innerGsClass = (IGosuClassInternal)enclosingGsClass.getKnownInnerClassesWithoutCompiling().get( strInnerClass );
if( innerGsClass != null )
{
// Duplicate inner class name
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_DUPLICATE_CLASS_FOUND, strInnerClass ) );
strInnerClass = strInnerClass + "_duplicate_" + nextIndexOfErrantDuplicateInnerClass( enclosingGsClass, innerGsClass );
}
innerGsClass = (IGosuClassInternal)gsClass.getTypeLoader().makeNewClass(
new InnerClassFileSystemSourceFileHandle( classType, enclosingGsClass.getName(), strInnerClass, gsClass.isTestClass() ) );
innerGsClass.setEnclosingType( enclosingGsClass );
innerGsClass.setNamespace( enclosingGsClass.getNamespace() );
enclosingGsClass.addInnerClass( innerGsClass );
advanceToClassBodyStart();
return innerGsClass;
}
return null;
}
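// Finds the next unused suffix index for renaming an errant duplicate inner class,
// e.g., Foo_duplicate_0, Foo_duplicate_1, ...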
public int nextIndexOfErrantDuplicateInnerClass( IGosuClassInternal enclosingGsClass, IGosuClassInternal innerClass )
{
int iMax = -1;
String strName = innerClass.getRelativeName() + "_duplicate_";
while( true )
{
IType existingInnerClass = enclosingGsClass.getKnownInnerClassesWithoutCompiling().get( strName + ++iMax );
if( existingInnerClass == null )
{
return iMax;
}
}
}
private IGosuClassInternal getGosuObjectInterface()
{
return IGosuClassInternal.Util.getGosuClassFrom( JavaTypes.IGOSU_OBJECT() );
}
private String parseEnhancementHeaderSuffix( IGosuEnhancementInternal gsClass )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
Token t = new Token();
verify( getClassStatement(), match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF );
String strClassName = t._strValue;
strClassName = GosuStringUtil.isEmpty(getOwner().getNamespace())
? strClassName
: getOwner().getNamespace() + '.' + strClassName;
ClassDeclaration classDeclaration = new ClassDeclaration( strClassName );
pushExpression( classDeclaration );
setLocation( iOffset, iLineNum, iColumn );
popExpression();
getClassStatement().setClassDeclaration( classDeclaration );
if( gsClass.getEnclosingTypeReference() == null && !strClassName.equals( gsClass.getName() ) )
{
verify( getClassStatement(), false, Res.MSG_WRONG_CLASSNAME, strClassName, gsClass.getName() );
}
List<ITypeVariableDefinitionExpression> typeVarLiteralList = getOwner().parseTypeVariableDefs( getClassStatement(), false, getDeclTypeVars() );
gsClass.setGenericTypeVariables((List)typeVarLiteralList);
verify( getClassStatement(), match( null, ":", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_COLON_ENHANCEMENT );
IType enhancedType = parseEnhancedOrImplementedType( gsClass, true, Collections.<IType>emptyList() );
if( !(enhancedType instanceof ErrorType ||
enhancedType instanceof IEnhanceableType) )
{
verify( getClassStatement(), false, Res.MSG_NOT_AN_ENHANCEABLE_TYPE, enhancedType.getName() );
}
gsClass.setEnhancedType( enhancedType );
ensureEnhancedTypeUsesTypeVarsOfEnhancement( typeVarLiteralList, enhancedType );
return strClassName;
}
private void ensureEnhancedTypeUsesTypeVarsOfEnhancement( List<ITypeVariableDefinitionExpression> typeVarLiteralList, IType enhancedType )
{
if( typeVarLiteralList.isEmpty() )
{
return;
}
for( ITypeVariableDefinitionExpression expr: typeVarLiteralList )
{
boolean bReferencedByOtherTypeVar = false;
for( ITypeVariableDefinitionExpression expr2: typeVarLiteralList )
{
if( expr2 != expr )
{
if( hasTypeVar( expr2.getTypeVarDef().getBoundingType(), expr.getTypeVarDef().getType() ) )
{
bReferencedByOtherTypeVar = true;
break;
}
}
}
verify( getClassStatement(), bReferencedByOtherTypeVar || hasTypeVar( enhancedType, expr.getTypeVarDef().getType() ), Res.MSG_ENHANCED_TYPE_MUST_USE_ENHANCEMENT_TYPEVARS );
}
}
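// Returns true if the given type references the given type variable, either directly or through
// one of its type parameters; array types are first reduced to their core component type.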
private boolean hasTypeVar( IType type, ITypeVariableType typeVar )
{
if( type == null )
{
return false;
}
if( type.isArray() )
{
type = TypeLord.getCoreType( type );
}
if( type == null || type.equals( typeVar ) )
{
return true;
}
if( type.isParameterizedType() )
{
for( IType typeParam: type.getTypeParameters() )
{
if( hasTypeVar( typeParam, typeVar ) )
{
return true;
}
}
}
return false;
}
void parseProgramExtendsStatement( IGosuClassInternal gsClass, boolean bResolveTypes )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getLineOffset();
if( match( new Token(), Keyword.KW_extends ) )
{
IType superType = parseEnhancedOrImplementedType( gsClass, true, Collections.<IType>emptyList() );
SuperTypeClause stmt = new SuperTypeClause( superType );
if( superType instanceof IGosuClassInternal )
{
if( bResolveTypes )
{
((IGosuClassInternal)superType).compileDeclarationsIfNeeded();
}
}
pushExpression( stmt );
try
{
setLocation( iOffset, iLineNum, iColumn );
}
finally
{
popExpression();
}
ITypeInfo typeInfo = superType.getTypeInfo();
if( !(superType instanceof IErrorType) && typeInfo instanceof IRelativeTypeInfo )
{
IConstructorInfo noArgCtor = ((IRelativeTypeInfo)typeInfo).getConstructor( gsClass, null );
if( verify( stmt, noArgCtor != null, Res.MSG_NO_DEFAULT_CTOR_IN, superType.getName() ) )
{
gsClass.setSuperType( superType );
}
}
}
}
private IType parseEnhancedOrImplementedType( IGosuClassInternal gsClass, boolean bExtended, List<IType> interfaces )
{
IType extendedType = null;
TypeLiteral extendedTypeExpr = null;
if( gsClass instanceof IGosuProgram && !bExtended )
{
extendedType = gsClass.getSupertype() != null ? gsClass.getSupertype() : JavaTypes.OBJECT();
}
else if( !gsClass.isEnum() || !bExtended )
{
getOwner().parseTypeLiteral( !(gsClass instanceof IGosuEnhancementInternal) && (gsClass.isInterface() || !bExtended) );
extendedTypeExpr = (TypeLiteral)popExpression();
extendedType = extendedTypeExpr.getType().getType();
if( !verify( extendedTypeExpr, !extendedType.isCompoundType(), Res.MSG_COMPOUND_TYPE_NOT_ALLOWED_HERE ) )
{
extendedType = ErrorType.getInstance();
}
if( !(extendedType instanceof ErrorType) )
{
if( !(gsClass instanceof IGosuEnhancementInternal) )
{
if( gsClass.isInterface() )
{
verify( extendedTypeExpr, extendedType.isInterface(), Res.MSG_INTERFACE_CANNOT_EXTEND_CLASS );
}
else if( bExtended )
{
if( verify( extendedTypeExpr, !extendedType.isInterface(), Res.MSG_CLASS_CANNOT_EXTEND_INTERFACE ) )
{
verify( extendedTypeExpr, !gsClass.isEnum(), Res.MSG_ENUM_CANNOT_EXTEND_CLASS );
verify( extendedTypeExpr, extendedType != JavaTypes.OBJECT(), Res.MSG_SUBCLASS_OBJECT, gsClass.getRelativeName() );
verify( extendedTypeExpr, !extendedType.isArray(), Res.MSG_CANNOT_EXTEND_ARRAY, extendedType.getRelativeName() );
}
}
else
{
verify( extendedTypeExpr, extendedType.isInterface(), Res.MSG_CLASS_CANNOT_IMPLEMENT_CLASS );
}
verify( extendedTypeExpr, !extendedType.isPrimitive(), Res.MSG_CANNOT_EXTEND_PRIMITIVE_TYPE );
verify( extendedTypeExpr, !extendedType.isFinal(), Res.MSG_CANNOT_EXTEND_FINAL_TYPE, extendedType.getName() );
if( verify( extendedTypeExpr, !interfaces.contains( extendedType ), Res.MSG_DUPLICATE_CLASS_FOUND, extendedType.getRelativeName() ) )
{
IType[] conflict = inheritsWithDifferentTypeParams( gsClass.getSupertype(), interfaces, extendedType );
if( conflict != null )
{
extendedTypeExpr.addParseException( Res.MSG_INHEREITED_WITH_DIFF_ARG_TYPES, TypeLord.getPureGenericType( conflict[0] ).getName(), Arrays.toString( conflict[0].getTypeParameters() ) + " , " + Arrays.toString( conflict[1].getTypeParameters() ) );
}
}
if( isCyclicInheritance( extendedType, gsClass ) )
{
extendedType = ErrorType.getInstance( extendedType.getName() );
verify( extendedTypeExpr, false, Res.MSG_CYCLIC_INHERITANCE, extendedType.getName() );
}
}
else
{
if( extendedType instanceof IGosuEnhancementInternal )
{
verify( extendedTypeExpr, false, Res.MSG_ENHANCEMENTS_CANNOT_ENHANCE_OTHER_ENHANCEMENTS, extendedType.getName() );
}
}
}
}
else if( gsClass.isEnum() )
{
extendedType = JavaTypes.ENUM();
extendedType = extendedType.getParameterizedType( gsClass );
}
makeProxy( gsClass, extendedType );
extendedType = TypeLord.makeDefaultParameterizedType( extendedType );
if( !verify( extendedTypeExpr == null ? getClassStatement() : extendedTypeExpr,
(!extendedType.isGenericType() || extendedType instanceof IGosuClass && !((IGosuClass) extendedType).isHeaderCompiled()) ||
extendedType.isParameterizedType() || gsClass instanceof IGosuEnhancementInternal,
Res.MSG_CANNOT_EXTEND_RAW_GENERIC_TYPE, extendedType.getName() ) )
{
// If we are unable to resolve a parameterized type, extend the error type
extendedType = ErrorType.getInstance();
}
if( bExtended && !(gsClass instanceof IGosuEnhancementInternal) )
{
verify( extendedTypeExpr == null ? getClassStatement() : extendedTypeExpr,
Modifier.isStatic( extendedType.getModifiers() ) || extendedType.getEnclosingType() == null ||
TypeLord.enclosingTypeInstanceInScope( extendedType.getEnclosingType(), getGosuClass() ),
Res.MSG_NO_ENCLOSING_INSTANCE_IN_SCOPE, extendedType.getEnclosingType() );
}
if( !(extendedType instanceof ErrorType) )
{
if( gsClass.isDeclarationsCompiled() )
{
verifySuperTypeVarVariance( getClassStatement(), extendedType );
}
}
return extendedType;
}
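// Searches the supertype and interface graph for a type that has the same raw (pure generic) type
// as the given interface but different type parameters; returns the conflicting pair, or null.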
private IType[] inheritsWithDifferentTypeParams( IType superType, List<IType> interfaces, IType iface )
{
if( superType != null )
{
IType[] conflict = inheritsWithDifferentTypeParams( null, Arrays.asList( superType.getInterfaces() ), iface );
if( conflict != null )
{
return conflict;
}
}
IType rawIface = TypeLord.getPureGenericType( iface );
for( IType csr: interfaces )
{
if( TypeLord.getPureGenericType( csr ) == rawIface && csr != iface )
{
return new IType[] {csr, iface};
}
IType[] conflict = inheritsWithDifferentTypeParams( null, Arrays.asList( csr.getInterfaces() ), iface );
if( conflict != null )
{
return conflict;
}
conflict = inheritsWithDifferentTypeParams( null, Arrays.asList( iface.getInterfaces() ), csr );
if( conflict != null )
{
return conflict;
}
}
return null;
}
private void makeProxy( IGosuClassInternal gsClass, IType extendedType )
{
if( !(gsClass instanceof IGosuEnhancementInternal) && extendedType instanceof IJavaType )
{
// Create a gosu class proxy for the java one.
// It is attached to the JavaType as its adapterClass.
GosuClassProxyFactory.instance().create( extendedType );
}
}
private Object parseFunctionOrConstructorOrFieldDeclaration( IGosuClassInternal gsClass )
{
int[] location = new int[3];
Object rtn = _parseFunctionOrConstructorOrFieldDeclaration( gsClass, location );
if( rtn != null )
{
setLocation( location[0], location[1], location[2] );
}
return rtn;
}
//------------------------------------------------------------------------------
//
// class-member-declaration
// <constructor-declaration>
// <function-declaration>
// <field-declaration>
//
// constructor-declaration
// [modifiers] function <class-name> ( [ <argument-declaration-list> ] )
//
// function-declaration
// [modifiers] function <identifier> ( [ <argument-declaration-list> ] ) [: <type-literal>]
//
// field-declaration
// [modifiers] var <identifier> [ : <type-literal> ] = <expression>
// [modifiers] var <identifier> : <type-literal> [ = <expression> ]
//
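// For illustration only (a hedged sketch with hypothetical names), Gosu members matching the
// productions above might look like:
//
//   construct( name : String ) { ... }                      // constructor-declaration
//   function sizeOf( items : List<String> ) : int { ... }   // function-declaration
//   var _name : String = "default"                          // field-declaration
//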
private Object _parseFunctionOrConstructorOrFieldDeclaration( IGosuClassInternal gsClass, int[] location )
{
String[] T = new String[1];
ModifierInfo modifiers;
boolean bInterface = gsClass.isInterface();
while( true )
{
modifiers = parseUntilMemberKeyword( T, false, -1, location );
if( modifiers.getModifiers() == -1 )
{
return null;
}
if( Keyword.KW_class.equals( T[0] ) ||
Keyword.KW_interface.equals( T[0] ) ||
Keyword.KW_annotation.equals( T[0] ) ||
Keyword.KW_structure.equals( T[0] ) ||
Keyword.KW_enum.equals( T[0] ) )
{
if( bInterface && Keyword.KW_enum.equals( T[0] ))
{
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_enum );
}
parseInnerClassDeclaration();
}
else
{
break;
}
}
if( bInterface )
{
modifiers.addModifiers( Modifier.PUBLIC );
}
if( T[0] != null &&
(Keyword.KW_function.equals( T[0] ) ||
Keyword.KW_construct.equals( T[0] )) )
{
String ctorNameToken = null;
boolean bConstructKeyword = false;
if( Keyword.KW_construct.equals( T[0] ) )
{
T[0] = gsClass.getRelativeName();
ctorNameToken = T[0];
bConstructKeyword = true;
}
else
{
int mark = getTokenizer().mark();
if( match( null, null, SourceCodeTokenizer.TT_WORD, true ) )
{
T[0] = getTokenizer().getTokenAt( mark ).getStringValue();
}
}
FunctionStatement fs = makeFunctionOrConstructorStatement( gsClass, T[0], bConstructKeyword );
IParserState constructOrFunctionState = makeLazyLightweightParserState();
verify( fs, !(gsClass instanceof IGosuProgramInternal) || !((IGosuProgramInternal)gsClass).isStatementsOnly(),
Res.MSG_FUNCTIONS_NOT_ALLOWED_IN_THIS_CONTEXT );
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, ctorNameToken, false, false, modifiers );
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
if( bInterface && !match( null, null, '{', true ) ) {
modifiers.addModifiers( Modifier.ABSTRACT );
dfs.setAbstract( true );
}
if( dfs != null )
{
dfs.setClassMember( true );
boolean bConstructor = dfs.getDisplayName().equals( gsClass.getRelativeName() );
if( bConstructor )
{
verify( fs, !Modifier.isAbstract(modifiers.getModifiers()), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE );
verify( fs, !gsClass.isInterface(), Res.MSG_NOT_ALLOWED_IN_INTERFACE );
verify( fs, !(gsClass instanceof IGosuProgramInternal), Res.MSG_CONSTRUCTORS_NOT_ALLOWD_IN_THIS_CONTEXT );
verify( fs, !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, Keyword.KW_construct );
verify( fs, !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_construct );
verify( fs, !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_NO_STATIC_CONSTRUCTOR );
verify( fs, !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, Keyword.KW_construct );
if( !bConstructKeyword )
{
fs.addParseWarning( new ObsoleteConstructorWarning( constructOrFunctionState, Res.MSG_OBSOLETE_CTOR_SYNTAX ) );
}
}
else
{
verifyNoCombinedFinalStaticModifierDefined( fs, false, modifiers.getModifiers() );
verify(fs, !Modifier.isAbstract(modifiers.getModifiers()) || !Modifier.isStatic(modifiers.getModifiers()), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_static, Keyword.KW_abstract);
verify(fs, !Modifier.isAbstract(modifiers.getModifiers()) || !Modifier.isFinal(modifiers.getModifiers()), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_abstract);
verify( fs, !Modifier.isTransient(modifiers.getModifiers()), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, Keyword.KW_function );
}
}
eatOptionalSemiColon( bInterface );
if( !Modifier.isNative( modifiers.getModifiers() ) && !Modifier.isAbstract( modifiers.getModifiers() ) )
{
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
}
return dfs;
}
else if( T[0] != null && T[0].equals( Keyword.KW_property.toString() ) )
{
boolean bGetter = match( null, Keyword.KW_get );
verify( getClassStatement(), bGetter || match( null, Keyword.KW_set ), Res.MSG_EXPECTING_PROPERTY_GET_OR_SET_MODIFIER );
FunctionStatement fs = new FunctionStatement();
verifyNoCombinedFinalStaticModifierDefined( fs, false, modifiers.getModifiers() );
verify( fs, !(gsClass instanceof IGosuProgramInternal) || !((IGosuProgramInternal)gsClass).isStatementsOnly(),
Res.MSG_FUNCTIONS_NOT_ALLOWED_IN_THIS_CONTEXT );
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, true, bGetter, modifiers );
if( dfs == null )
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_EXPECTING_DECL ) );
return null;
}
if( dfs.getDisplayName().length() > 0 &&
dfs.getDisplayName().charAt(0) == '@' )
{
String name = dfs.getDisplayName().substring(1);
boolean bOuterLocalDefined = findLocalInOuters( name ) instanceof CapturedSymbol;
verifyOrWarn( fs, !bOuterLocalDefined, false, Res.MSG_VARIABLE_ALREADY_DEFINED, name );
}
if( bInterface && !match( null, null, '{', true ) )
{
verify( fs, !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_property );
modifiers.setModifiers( Modifier.setAbstract( modifiers.getModifiers(), true ) );
dfs.setAbstract( true );
}
verify( fs, !Modifier.isAbstract( modifiers.getModifiers() ) || !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_static, Keyword.KW_abstract );
verify( fs, !Modifier.isAbstract( modifiers.getModifiers() ) || !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_abstract );
verify( fs, !Modifier.isAbstract( modifiers.getModifiers() ) || gsClass.isAbstract(), Res.MSG_ABSTRACT_MEMBER_NOT_IN_ABSTRACT_CLASS );
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
setLocation( location[0], location[1], location[2] );
popStatement();
dfs.setClassMember( true );
eatOptionalSemiColon( bInterface );
if( !bInterface &&
!Modifier.isNative( modifiers.getModifiers() ) && !Modifier.isAbstract( modifiers.getModifiers() ) )
{
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
}
DynamicPropertySymbol dps = getOrCreateDynamicPropertySymbol( getClassStatement(), gsClass, dfs, bGetter );
PropertyStatement statement = new PropertyStatement( fs, dps );
verifyPropertiesAreSymmetric( bGetter, dfs, dps, statement );
pushStatement( statement );
return dps;
}
else if( T[0] != null && T[0].equals( Keyword.KW_var.toString() ) )
{
if( bInterface )
{
modifiers.setModifiers( Modifier.setStatic( modifiers.getModifiers(), true ) );
modifiers.setModifiers( Modifier.setFinal( modifiers.getModifiers(), true ) );
}
return parseFieldDecl( modifiers );
}
else if( T[0] != null && T[0].equals( Keyword.KW_delegate.toString() ) )
{
return parseDelegateDecl( modifiers, gsClass );
}
else
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_EXPECTING_DECL ) );
return null;
}
}
private void verifySuperTypeVarVariance( ClassStatement classStatement, IType type )
{
if( !type.isParameterizedType() || !getGosuClass().isGenericType() )
{
return;
}
IGenericTypeVariable[] gtvs = type.getGenericType().getGenericTypeVariables();
IType[] typeParameters = type.getTypeParameters();
for( int i = 0; i < typeParameters.length; i++ )
{
if( gtvs[i] != null && gtvs[i].getTypeVariableDefinition() != null )
{
Variance variance = Variance.maybeInferVariance( type, gtvs[i] );
verifyTypeVarVariance( variance, classStatement, typeParameters[i] );
}
}
}
private void verifyTypeVarVariance( Variance ctxVariance, ParsedElement elem, IType type )
{
if( !getGosuClass().isGenericType() )
{
return;
}
Variance.verifyTypeVarVariance( ctxVariance,
getGosuClass(),
( Variance ctxV, Variance typeVarV ) -> {
verify( elem, typeVarV == ctxV || typeVarV == Variance.DEFAULT || typeVarV == Variance.INVARIANT || ctxV == Variance.PENDING || typeVarV == Variance.PENDING,
Res.MSG_TYPE_VAR_VARIANCE_ERROR, type.getRelativeName(), typeVarV == null ? "null" : typeVarV.getDesc(), ctxV.getDesc(), type.getRelativeName() );
},
type );
}
private void verifyPropertiesAreSymmetric( boolean bGetter,
DynamicFunctionSymbol newFunction,
DynamicPropertySymbol propertySymbol,
Statement stmt )
{
DynamicFunctionSymbol getter;
DynamicFunctionSymbol setter;
if( bGetter )
{
getter = newFunction;
setter = propertySymbol == null ? null : propertySymbol.getSetterDfs();
}
else
{
getter = propertySymbol == null ? null : propertySymbol.getGetterDfs();
setter = newFunction;
}
if( getter != null && setter != null )
{
if( getter.isStatic() != setter.isStatic() )
{
verify( stmt, false, Res.MSG_PROPERTIES_MUST_AGREE_ON_STATIC_MODIFIERS );
}
if( setter.getArgs().size() == 1 )
{
IType setterType = setter.getArgTypes()[0];
IType returnType = getter.getReturnType();
if( !setterType.isAssignableFrom( returnType ) ||
!setterType.isAssignableFrom( propertySymbol.getType() ) )
{
verify( stmt, false, Res.MSG_PROPERTIES_MUST_AGREE_ON_TYPE );
}
}
}
else if( getter != null && propertySymbol != null && newFunction != null &&
getGosuClass() == propertySymbol.getScriptPart().getContainingType() &&
getter.getSuperDfs() == null )
{
verify( stmt, propertySymbol.getType().equals( newFunction.getReturnType() ), Res.MSG_PROPERTIES_MUST_AGREE_ON_TYPE );
}
}
private FunctionStatement makeFunctionOrConstructorStatement( IGosuClassInternal gsClass, String strMemberKeyword, boolean bConstructKeyword )
{
FunctionStatement fs;
if( gsClass != null &&
(bConstructKeyword || gsClass.getRelativeName().equals( strMemberKeyword )) )
{
fs = new ConstructorStatement( bConstructKeyword );
}
else
{
fs = new FunctionStatement();
}
return fs;
}
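// Scans forward from the current token, parsing modifiers and skipping over statements and blocks,
// until a member-declaring keyword is matched; returns its ModifierInfo, or a ModifierInfo with
// modifiers set to -1 when the end of the class body or the end of the file is reached.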
private ModifierInfo parseUntilMemberKeyword( String[] T, boolean bIgnoreErrors, int[] location )
{
return parseUntilMemberKeyword( T, bIgnoreErrors, -1, location );
}
private ModifierInfo parseUntilMemberKeyword( String[] T, boolean bIgnoreErrors, int iEnd, int[] location )
{
return parseUntilMemberKeyword( T, bIgnoreErrors, iEnd, location, null );
}
private ModifierInfo parseUntilMemberKeyword( String[] T, boolean bIgnoreErrors, int iEnd, int[] location, int[] mark )
{
boolean bPeek = T == null;
while( true )
{
if( location != null )
{
location[0] = getTokenizer().getTokenStart();
location[1] = getTokenizer().getLineNumber();
location[2] = getTokenizer().getTokenColumn();
}
if( mark != null )
{
mark[0] = getTokenizer().mark();
}
ModifierInfo modifiers = parseModifiers( bIgnoreErrors );
if( matchDeclarationKeyword( T, bPeek, getTokenizer() ) )
{
return modifiers;
}
popModifierList();
boolean bAte = false;
if( getGosuClass() instanceof IGosuProgram )
{
bAte = eatPossibleEnclosedVarInStmt(); // e.g., for( var foo in foos ) {...} we don't want the var foo to be consumed as a field (applies to GosuPrograms).
}
bAte = eatPossibleStatementBlock() || bAte;
if( location != null )
{
// Mark possible end location of member definition
location[0] = getTokenizer().getTokenStart();
}
if( match( null, SourceCodeTokenizer.TT_EOF ) ||
((!(getGosuClass() instanceof IGosuProgram) || !getGosuClass().isHeaderCompiled()) && match( null, '}' )) ||
(iEnd >= 0 && getTokenizer().getTokenStart() >= iEnd) )
{
modifiers.setModifiers( -1 );
return modifiers;
}
if( !bAte )
{
getTokenizer().nextToken();
if( getTokenizer().isEOF() )
{
modifiers.setModifiers( -1 );
return modifiers;
}
}
}
}
private void popModifierList()
{
ParseTree parseTree = getOwner().peekLocation();
if( parseTree == null )
{
return;
}
ParsedElement pe = parseTree.getParsedElement();
if( pe instanceof IModifierListClause )
{
List<ParseTree> locationsList = getLocationsList();
locationsList.remove( locationsList.size()-1 );
}
}
private void parseInnerClassDeclaration()
{
IGosuClassInternal enclosingGsClass = getClassStatement().getGosuClass();
int mark = getTokenizer().mark();
String strInnerClass = null;
if( verify( getClassStatement(), match( null, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF ) )
{
strInnerClass = getTokenizer().getTokenAt( mark ).getStringValue();
}
if( strInnerClass != null )
{
String name = enclosingGsClass.getName();
String dotInner = "." + strInnerClass;
verify( getClassStatement(), !name.equals(strInnerClass) &&
!name.contains(dotInner + ".") &&
!name.endsWith(dotInner) &&
!name.startsWith(strInnerClass + "."), Res.MSG_DUPLICATE_CLASS_FOUND, name + dotInner );
for( IGosuClass c : enclosingGsClass.getKnownInnerClassesWithoutCompiling().values() )
{
IGosuClassInternal innerClass = (IGosuClassInternal)c;
if( innerClass.getRelativeName().equals( strInnerClass ) )
{
int i = 0;
String relativeName = innerClass.getName();
while( innerClass.isDeclarationsCompiled() || innerClass.isDeclarationsBypassed() )
{
// The inner class is already declaration-compiled, maybe this is a duplicate inner class...
String duplicate = relativeName + "_duplicate_" + i++;
innerClass = (IGosuClassInternal)TypeSystem.getByFullNameIfValid( duplicate );
if( innerClass == null )
{
return;
}
}
parseInnerClassDeclaration( innerClass );
break;
}
}
}
}
private void parseInnerClassDeclaration( IGosuClassInternal innerClass ) {
// Preserve dfs decls map of outer class
Map<String, List<IFunctionSymbol>> restoreDfsDecls = copyDFSDecls( getOwner() );
try {
new GosuClassParser( getOwner(), innerClass ).parseDeclarations( innerClass );
if( innerClass.isInterface() )
{
ModifierInfo mi = (ModifierInfo)innerClass.getModifierInfo();
mi.setModifiers( Modifier.setStatic( mi.getModifiers(), true ));
}
}
finally {
getOwner().setDfsDeclInSetByName( restoreDfsDecls );
}
}
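// Returns a copy of the parser's dfs-decl map (with copied value lists), used to restore the
// enclosing class's declarations after parsing an inner class.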
private static Map<String, List<IFunctionSymbol>> copyDFSDecls( GosuParser owner )
{
Map<String, List<IFunctionSymbol>> hashMap = new HashMap<>( owner.getDfsDecls() );
for( String name : hashMap.keySet() )
{
hashMap.put( name, new ArrayList<>( hashMap.get( name ) ) );
}
return hashMap;
}
private VarStatement parseFieldDecl( ModifierInfo modifiers )
{
Token T = new Token();
VarStatement varStmt = new VarStatement();
verify( varStmt, !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE );
verify( varStmt, !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, Keyword.KW_var );
final int iNameStart = getTokenizer().getTokenStart();
if( !verify( varStmt, match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_VAR ) )
{
T._strValue = null;
}
String strIdentifier = T._strValue == null ? "" : T._strValue;
boolean bAlreadyDefined = getSymbolTable().getSymbol( strIdentifier ) != null;
verify( varStmt, !bAlreadyDefined, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
checkForEnumConflict( varStmt, strIdentifier );
boolean bStatic = Modifier.isStatic( modifiers.getModifiers() );
GlobalScope scope;
if( match( null, Keyword.KW_application ) )
{
// NOTE: For class parsing APPLICATION == static
bStatic = true;
scope = GlobalScope.EXECUTION;
verifyOrWarn( varStmt, false, true, Res.MSG_APPLICATION_MODIFIER_HAS_BEEN_DEPRECATED );
}
else if( match( null, Keyword.KW_session ) )
{
bStatic = true;
scope = GlobalScope.SESSION;
verifyOrWarn( varStmt, false, true, Res.MSG_SESSION_MODIFIER_HAS_BEEN_DEPRECATED );
}
else if( match( null, Keyword.KW_request ) )
{
bStatic = true;
scope = GlobalScope.REQUEST;
verifyOrWarn( varStmt, false, true, Res.MSG_REQUEST_MODIFIER_HAS_BEEN_DEPRECATED );
}
else
{
// execution keyword may be there
boolean hasExecutionKeyword = match( null, Keyword.KW_execution );
scope = GlobalScope.EXECUTION;
verifyOrWarn( varStmt, !hasExecutionKeyword, true, Res.MSG_EXECUTION_MODIFIER_HAS_BEEN_DEPRECATED );
}
TypeLiteral typeLiteral = null;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
getOwner().parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
}
else if( !match( null, "=", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
if( match( null, null, '(', true ) )
{
getOwner().parseBlockLiteral();
typeLiteral = (TypeLiteral)popExpression();
}
}
IType type;
if( typeLiteral != null )
{
type = typeLiteral.getType().getType();
varStmt.setTypeLiteral( typeLiteral );
}
else
{
type = GosuParserTypes.NULL_TYPE();
}
varStmt.setScope( scope );
if( bStatic )
{
modifiers.setModifiers( Modifier.setStatic( modifiers.getModifiers(), true ) );
}
varStmt.setModifierInfo( modifiers );
if( !verify( varStmt, varStmt.isPrivate() || type != GosuParserTypes.NULL_TYPE(), Res.MSG_NON_PRIVATE_MEMBERS_MUST_DECLARE_TYPE ) )
{
type = ErrorType.getInstance();
}
DynamicPropertySymbol dpsVarProperty = getOwner().parseVarPropertyClause( varStmt, strIdentifier, type, false );
if( dpsVarProperty != null )
{
String propertyName = dpsVarProperty.getName();
ISymbol existingSym = getSymbolTable().getSymbol(propertyName);
boolean bOuterLocalDefined = findLocalInOuters( propertyName ) instanceof CapturedSymbol;
bAlreadyDefined = existingSym != null || bOuterLocalDefined || propertyName.equals( strIdentifier );
verify( varStmt, !bAlreadyDefined || existingSym instanceof DynamicPropertySymbol, Res.MSG_VARIABLE_ALREADY_DEFINED, propertyName );
getSymbolTable().putSymbol( dpsVarProperty );
verifyPropertiesAreSymmetric( true, dpsVarProperty.getGetterDfs(), dpsVarProperty, varStmt );
setStatic( bStatic, dpsVarProperty );
dpsVarProperty.addMemberSymbols( getGosuClass() );
}
AbstractDynamicSymbol symbol;
if( varStmt.getScope() == GlobalScope.EXECUTION )
{
symbol = new DynamicSymbol( getGosuClass(), getSymbolTable(), strIdentifier, type, null );
}
else
{
symbol = new ScopedDynamicSymbol( getSymbolTable(), strIdentifier, getGosuClass().getName(), type, varStmt.getScope() );
}
modifiers.addAll( symbol.getModifierInfo() );
if( varStmt.isPrivate() )
{
// Ensure private bit is explicit
modifiers.setModifiers( Modifier.setPrivate( modifiers.getModifiers(), true ) );
}
symbol.setModifierInfo( modifiers );
varStmt.setSymbol( symbol );
varStmt.setNameOffset( iNameStart, T._strValue );
if( bAlreadyDefined )
{
int iDupIndex = getOwner().nextIndexOfErrantDuplicateDynamicSymbol( symbol, getSymbolTable().getSymbols().values(), false );
if( iDupIndex >= 0 )
{
symbol.renameAsErrantDuplicate( iDupIndex );
}
}
getSymbolTable().putSymbol( symbol );
pushStatement( varStmt );
return varStmt;
}
private void checkForEnumConflict( VarStatement varStmt, String identifier )
{
if( getGosuClass().isEnum() )
{
ISymbol existingProp = getGosuClass().getMemberProperty( identifier );
verify( varStmt, !(existingProp instanceof DynamicPropertySymbol), Res.MSG_VARIABLE_ALREADY_DEFINED, identifier );
}
}
private VarStatement parseDelegateDecl( ModifierInfo modifiers, IGosuClassInternal gsClass )
{
Token T = new Token();
DelegateStatement delegateStmt = new DelegateStatement();
verify( delegateStmt, gsClass == null || (!gsClass.isInterface() && !gsClass.isEnum()), Res.MSG_DELEGATION_NOT_ALLOWED_HERE );
verify( delegateStmt, !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_DELEGATES_CANNOT_BE_STATIC );
verify( delegateStmt, !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE );
verify( delegateStmt, !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, Keyword.KW_var );
int iNameOffset = getTokenizer().getTokenStart();
if( verify( delegateStmt, match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_VAR ) )
{
delegateStmt.setNameOffset( iNameOffset, null );
}
String strIdentifier = T._strValue == null ? "" : T._strValue;
verify( delegateStmt, getSymbolTable().getSymbol( strIdentifier ) == null, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
GlobalScope scope = GlobalScope.EXECUTION;
TypeLiteral typeLiteral = null;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
getOwner().parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
}
List<IType> constituents = new ArrayList<IType>();
if( verify( delegateStmt, match( null, Keyword.KW_represents ), Res.MSG_EXPECTING_REPRESENTS ) )
{
do
{
getOwner().parseTypeLiteral();
TypeLiteral ifaceLiteral = (TypeLiteral)popExpression();
IType iface = ifaceLiteral.getType().getType();
if( !(iface instanceof ErrorType) )
{
verify( ifaceLiteral, iface.isInterface() && !iface.isCompoundType(), Res.MSG_DELEGATES_REPRESENT_INTERFACES_ONLY );
verify( ifaceLiteral, TypeLord.isDelegatableInterface( gsClass, iface ), Res.MSG_CLASS_DOES_NOT_IMPL, iface );
verify( typeLiteral, typeLiteral == null || TypeLord.isDelegatableInterface( typeLiteral.getType().getType(), iface ), Res.MSG_CLASS_DOES_NOT_IMPL, iface );
}
constituents.add( iface );
} while( match( null, ',' ) );
}
delegateStmt.setConstituents( constituents );
IType type;
if( typeLiteral != null )
{
type = typeLiteral.getType().getType();
delegateStmt.setTypeLiteral( typeLiteral );
}
else
{
type = GosuParserTypes.NULL_TYPE();
}
delegateStmt.setScope( scope );
delegateStmt.setModifierInfo( modifiers );
verify( delegateStmt, delegateStmt.isPrivate() || type != GosuParserTypes.NULL_TYPE(), Res.MSG_NON_PRIVATE_MEMBERS_MUST_DECLARE_TYPE );
AbstractDynamicSymbol symbol = new DynamicSymbol( getGosuClass(), getSymbolTable(), strIdentifier, type, null );
modifiers.addAll( symbol.getModifierInfo() );
symbol.setModifierInfo( modifiers );
delegateStmt.setSymbol( symbol );
getSymbolTable().putSymbol( symbol );
pushStatement( delegateStmt );
return delegateStmt;
}
private void setStatic( boolean bStatic, DynamicPropertySymbol dpsVarProperty )
{
dpsVarProperty.setStatic( bStatic );
if( dpsVarProperty.getSetterDfs() != null )
{
dpsVarProperty.getSetterDfs().setStatic( bStatic );
}
if( dpsVarProperty.getGetterDfs() != null )
{
dpsVarProperty.getGetterDfs().setStatic( bStatic );
}
}
//------------------------------------------------------------------------------
//
// class-statement
// [modifiers] class <identifier> [extends <base-class>] [implements <interfaces-list>] { <class-members> }
//
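// For illustration only (a hedged sketch, hypothetical names), a class statement matching this
// production:
//
//   public class Customer extends Party implements IAddressable {
//     // <class-members>
//   }
//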
boolean parseClassStatement()
{
IGosuClassInternal gsClass = getGosuClass();
ensureAbstractMethodsImpledAndNoDiamonds( gsClass );
ensureInheritedMethodsDoNotClash( gsClass );
//## todo: remove this scope?
getSymbolTable().pushScope();
try
{
verify( getClassStatement(), gsClass instanceof IGosuProgram || match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CLASS_DEF );
Statement currentStmt = (isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass )) ? null : peekStatement();
parseClassMembers( gsClass );
for( Statement stmt = peekStatement(); stmt != currentStmt; stmt = peekStatement() )
{
stmt = popStatement();
if( stmt instanceof VarStatement ||
stmt instanceof FunctionStatement ||
stmt instanceof PropertyStatement ||
stmt instanceof NoOpStatement ||
stmt instanceof NamespaceStatement ||
stmt instanceof UsesStatement ||
stmt instanceof ClassStatement )
{
// ignore
}
else
{
throw new IllegalStateException( "Expecting only statements for: package, uses, var, function, or property." );
}
}
verify( getClassStatement(), match( null, '}' ) || gsClass instanceof IGosuProgram, Res.MSG_EXPECTING_CLOSE_BRACE_FOR_CLASS_DEF );
}
finally
{
getSymbolTable().popScope();
}
return true;
}
private void ensureInheritedMethodsDoNotClash( IGosuClassInternal gsClass )
{
if( !inheritsFromTwoOrMoreTypes( gsClass ) )
{
return;
}
MethodList methods = gsClass.getTypeInfo().getMethods( gsClass );
for( DynamicArray<IMethodInfo> bucket: methods.getMethodBuckets() )
{
if( bucket.size() > 1 )
{
Map<String, IReducedDynamicFunctionSymbol> functionTypes = new HashMap<>();
for( IMethodInfo mi : bucket )
{
if( mi instanceof IGosuMethodInfo )
{
IReducedDynamicFunctionSymbol dfs = ((IGosuMethodInfo)mi).getDfs();
IReducedDynamicFunctionSymbol originalDfs = dfs;
while( true )
{
IReducedDynamicFunctionSymbol superDfs = dfs.getSuperDfs();
if( superDfs != null && superDfs != dfs )
{
dfs = superDfs;
}
else
{
IReducedDynamicFunctionSymbol backingDfs = dfs.getBackingDfs();
if( backingDfs != null && backingDfs != dfs )
{
dfs = backingDfs;
}
else
{
break;
}
}
}
if( dfs != originalDfs && !(dfs.getGosuClass() instanceof IGosuEnhancement) )
{
FunctionType ft = ((FunctionType)dfs.getType()).getRuntimeType();
String paramSignature = ft.getParamSignature();
IReducedDynamicFunctionSymbol existingDfs = functionTypes.get( paramSignature );
if( existingDfs != null && existingDfs.getGosuClass() != dfs.getGosuClass() )
{
addError( getClassStatement(), Res.MSG_FUNCTION_CLASH_PARAMS, dfs.getName(), dfs.getGosuClass().getName(), existingDfs.getName(), existingDfs.getGosuClass().getName() );
}
functionTypes.put( paramSignature, dfs );
}
}
}
}
}
}
private boolean inheritsFromTwoOrMoreTypes( IGosuClassInternal gsClass )
{
int iCount = gsClass.getSuperClass() == null ? 0 : 1;
iCount += (gsClass.getInterfaces().length - 1); // subtract IGosuObject proxy
return iCount > 1;
}
private void ensureAbstractMethodsImpledAndNoDiamonds( IGosuClassInternal gsClass )
{
List<IFunctionType> unimpled = gsClass.getUnimplementedMethods();
for( Iterator<IFunctionType> iter = unimpled.iterator(); iter.hasNext(); )
{
IFunctionType funcType = iter.next();
final IMethodInfo mi = funcType.getMethodInfo();
if( mi.isDefaultImpl() )
{
// mi is a default interface method the class (or interface) does not override,
// check for a duplicate, not-overridden method that comes from an interface that
// is unrelated to mi's declaring interface
// i.e., prohibit "diamond" patterns directly interface-inherited from the class (or interface).
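// For illustration (hypothetical types): if unrelated interfaces IA and IB each declare a default
// implementation of foo(), and this class implements both without overriding foo(), it inherits
// two unrelated defaults and is flagged below.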
if( conflictsWithUnrelatedIfaceMethod( gsClass, funcType, unimpled ) )
{
iter.remove();
}
}
else if( !gsClass.isInterface() && !gsClass.isAbstract() )
{
// mi is abstract, the non-abstract class failed to implement it...
String strClass = funcType.getEnclosingType().getName();
strClass = IGosuClass.ProxyUtil.getNameSansProxy( strClass );
getClassStatement().addParseException( new NotImplementedParseException( makeFullParserState(), gsClass, strClass, funcType ) );
}
}
}
private boolean conflictsWithUnrelatedIfaceMethod( IGosuClassInternal gsClass, IFunctionType ft, List<IFunctionType> unimpled )
{
IMethodInfo mi = ft.getMethodInfo();
outer:
for( IFunctionType funcType: unimpled )
{
if( ft == funcType )
{
continue;
}
final IMethodInfo csrMi = funcType.getMethodInfo();
if( csrMi.getDisplayName().equals( mi.getDisplayName() ) &&
csrMi.getParameters().length == mi.getParameters().length &&
!csrMi.getOwnersType().isAssignableFrom( mi.getOwnersType() ) &&
!mi.getOwnersType().isAssignableFrom( csrMi.getOwnersType() ) )
{
IParameterInfo[] csrParams = csrMi.getParameters();
IParameterInfo[] params = mi.getParameters();
for( int i = 0; i < csrParams.length; i++ )
{
IParameterInfo csrPi = csrParams[i];
IParameterInfo pi = params[i];
IRType csrDescriptor = IRTypeResolver.getDescriptor( csrPi.getFeatureType() );
IRType descriptor = IRTypeResolver.getDescriptor( pi.getFeatureType() );
if( !csrDescriptor.equals( descriptor ) )
{
break outer;
}
}
if( csrMi.isDefaultImpl() )
{
getClassStatement().addParseException( makeFullParserState(), Res.MSG_INHERITS_UNRELATED_DEFAULTS, gsClass.getName(), funcType, mi.getOwnersType().getName(), csrMi.getOwnersType().getName() );
}
else if( gsClass.isAbstract() ) // interface or abstract class
{
getClassStatement().addParseException( makeFullParserState(), Res.MSG_INHERITS_ABSTRACT_AND_DEFAULT, gsClass.getName(), funcType, mi.getOwnersType().getName(), csrMi.getOwnersType().getName() );
}
return true;
}
}
return false;
}
// class-members
// <class-member>
// <class-members> <class-member>
//
// class-member
// <function-definition>
// <var-statement>
//
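// For illustration only (a hedged sketch, hypothetical names), a class body produced by these
// rules might contain:
//
//   var _total : int
//   function add( amount : int ) { _total += amount }
//   property get Total() : int { return _total }
//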
private void parseClassMembers( IGosuClassInternal gsClass )
{
if( isInnerClass( gsClass ) && !gsClass.isStatic() )
{
addOuterMember( gsClass );
}
ClassScopeCache scopeCache = makeClassScopeCache( gsClass );
parseEnumConstants( gsClass, scopeCache );
do
{
getOwner().checkInstruction( true );
while( match( null, ';' ) )
{
pushStatement( new NoOpStatement() );
}
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
ModifierInfo modifiers;
if( gsClass instanceof IGosuProgram )
{
int[] locations = new int[3];
modifiers = parseUntilMemberKeyword( null, false, locations );
iOffset = locations[0];
iLineNum = locations[1];
iColumn = locations[2];
}
else
{
// push static class symbols for annotations (they are part of modifier parsing)
pushClassSymbols( true, scopeCache );
try
{
modifiers = parseModifiers();
}
finally
{
popClassSymbols();
}
}
boolean bStatic = Modifier.isStatic( modifiers.getModifiers() );
if( gsClass.isInterface() )
{
modifiers.setModifiers( Modifier.setPublic( modifiers.getModifiers(), true ) );
}
boolean bDeprecated = isDeprecated( modifiers );
if( bDeprecated )
{
getOwner().pushIgnoreTypeDeprecation();
}
try
{
boolean bConstructSyntax = false;
Token T = new Token();
if( match( null, Keyword.KW_function ) ||
(bConstructSyntax = match( null, Keyword.KW_construct )) )
{
FunctionStatement functionStmt;
if( bConstructSyntax || isOldStyleConstructor( gsClass, T ) )
{
functionStmt = parseBaseConstructorDefinition( bConstructSyntax, modifiers.getAnnotations(), scopeCache );
verifyModifiers( functionStmt, modifiers, UsageTarget.ConstructorTarget );
}
else
{
pushClassSymbols( bStatic, scopeCache );
try
{
functionStmt = getOwner().parseBaseFunctionDefinition( null, false, false, modifiers );
if( gsClass.isInterface() && !bStatic )
{
eatOptionalSemiColon( true );
pushStatement( functionStmt );
}
verifyModifiers( functionStmt, modifiers, UsageTarget.MethodTarget );
}
finally
{
popClassSymbols();
}
}
DynamicFunctionSymbol dfs = functionStmt == null ? null : functionStmt.getDynamicFunctionSymbol();
if( dfs != null )
{
dfs.setClassMember( true );
if( dfs.getDisplayName().equals( gsClass.getRelativeName() ) )
{
gsClass.getParseInfo().addConstructorFunction(dfs);
}
else
{
gsClass.getParseInfo().addMemberFunction(dfs);
}
verifyTypeVarVariance( Variance.COVARIANT, functionStmt, dfs.getType() );
}
setLocation( iOffset, iLineNum, iColumn );
}
else if( match( null, Keyword.KW_property ) )
{
pushClassSymbols( bStatic, scopeCache );
try
{
Token t = new Token();
boolean bGetter = match( t, Keyword.KW_get );
boolean bSetter = !bGetter && match( null, Keyword.KW_set );
if( !bGetter && !bSetter )
{
getOwner().maybeEatNonDeclKeyword( false, t._strValue );
}
FunctionStatement functionStmt = getOwner().parseBaseFunctionDefinition( null, true, bGetter, modifiers );
verify( functionStmt, bGetter || bSetter, Res.MSG_EXPECTING_PROPERTY_GET_OR_SET_MODIFIER );
setLocation( iOffset, iLineNum, iColumn );
getOwner().popStatement();
DynamicFunctionSymbol dfs = functionStmt.getDynamicFunctionSymbol();
if( dfs != null )
{
IType returnType = functionStmt.getDynamicFunctionSymbol().getReturnType();
verify( functionStmt, bGetter || returnType == JavaTypes.pVOID(), Res.MSG_PROPERTY_SET_MUST_RETURN_VOID );
if( bGetter && dfs.getArgTypes() != null && dfs.getArgTypes().length > 0 )
{
List<IParameterDeclaration> parameters = functionStmt.getParameters();
for( IParameterDeclaration par : parameters )
{
par.addParseException( Res.MSG_GETTER_CANNOT_HAVE_PARAMETERS );
}
}
dfs.setClassMember( true );
DynamicPropertySymbol dps = getOrCreateDynamicPropertySymbol( functionStmt, gsClass, dfs, bGetter );
PropertyStatement stmt = new PropertyStatement( functionStmt, dps );
getOwner().pushStatement( stmt );
setLocation( iOffset, iLineNum, iColumn, true );
verifyPropertiesAreSymmetric( bGetter, dfs, dps, stmt );
dps.addMemberSymbols( gsClass );
if( bGetter )
{
verifyTypeVarVariance( Variance.COVARIANT, functionStmt, dps.getGetterDfs().getReturnType() );
}
else if( dps.getSetterDfs().getArgTypes().length > 0 )
{
verifyTypeVarVariance( Variance.CONTRAVARIANT, functionStmt, dps.getSetterDfs().getArgTypes()[0] );
}
}
verifyModifiers( functionStmt, modifiers, UsageTarget.PropertyTarget );
}
finally
{
popClassSymbols();
}
}
else if( match( null, Keyword.KW_var ) )
{
getOwner().pushParsingStaticMember( bStatic );
try
{
VarStatement varStmt = parseFieldDefn( gsClass, bStatic, scopeCache, modifiers );
verifyTypeVarVariance( Variance.INVARIANT, varStmt, varStmt.getType() );
setLocation( iOffset, iLineNum, iColumn );
removeInitializerIfInProgram( varStmt );
verifyModifiers( varStmt, modifiers, UsageTarget.PropertyTarget );
}
finally
{
getOwner().popParsingStaticMember();
}
}
else if( match( null, Keyword.KW_delegate ) )
{
DelegateStatement ds = parseDelegateDefn( gsClass, scopeCache, modifiers );
verifyModifiers( ds, modifiers, UsageTarget.PropertyTarget );
verifyTypeVarVariance( Variance.INVARIANT, ds, ds.getType() );
setLocation( iOffset, iLineNum, iColumn );
}
else if( match( T, Keyword.KW_class ) ||
match( T, Keyword.KW_interface ) ||
match( T, Keyword.KW_annotation ) ||
match( T, Keyword.KW_structure ) ||
match( T, Keyword.KW_enum ) )
{
// Pop the modifier list from the declaration phase, otherwise we'll have duplicates
popModifierList();
IGosuClassInternal inner = parseInnerClassDefinition( T );
if( inner != null )
{
inner.setAnnotations( modifiers.getAnnotations() );
if( inner.isInterface() )
{
modifiers.setModifiers( Modifier.setStatic( modifiers.getModifiers(), true ) );
ModifierInfo existingMI = (ModifierInfo)inner.getModifierInfo();
existingMI.addModifiers( modifiers.getModifiers() );
}
verifyModifiers( inner.getClassStatement(), modifiers, UsageTarget.TypeTarget );
}
}
else
{
// Pop the trailing modifier list, which doesn't correspond to any member
popModifierList();
if( !match( null, null, '}', true ) &&
!match( null, SourceCodeTokenizer.TT_EOF ) )
{
// Consume token first
boolean openBrace = false;
if( match( null, '{' ) )
{
openBrace = true;
}
else
{
getOwner().getTokenizer().nextToken();
}
NoOpStatement noop = new NoOpStatement();
verify( noop, false, Res.MSG_UNEXPECTED_TOKEN, getOwner().getTokenizer().getTokenAsString() );
pushStatement( noop );
setLocation( iOffset, iLineNum, iColumn );
if( openBrace )
{
eatBlock( '{', '}', false );
}
}
else
{
break;
}
}
}
finally
{
if( bDeprecated )
{
getOwner().popIgnoreTypeDeprecation();
}
}
} while( true );
}
private boolean isDeprecated( ModifierInfo modifiers )
{
List<IGosuAnnotation> annotations = modifiers.getAnnotations();
if( annotations != null )
{
for( IGosuAnnotation an : annotations )
{
if( an.getName().equalsIgnoreCase( "Deprecated" ) )
{
return true;
}
}
}
return false;
}
private boolean isOldStyleConstructor( IGosuClassInternal gsClass, Token t )
{
return match( t, null, SourceCodeTokenizer.TT_WORD, true ) &&
!gsClass.isInterface() &&
t._strValue.equals( gsClass.getRelativeName() );
}
private ClassScopeCache makeClassScopeCache( IGosuClassInternal gsClass )
{
// Copy the Static Scope so we can reuse it for each member
//
IScope staticScope;
Map<String, List<IFunctionSymbol>> staticDfsMap;
getSymbolTable().pushScope();
try
{
//getOwner().clearDfsDeclInSetByName();
getOwner().newDfsDeclInSetByName();
gsClass.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), true );
staticDfsMap = getOwner().getDfsDecls();
}
finally
{
staticScope = getSymbolTable().popScope();
}
// Copy the Non-Static Scope so we can reuse it for each member
//
IScope nonstaticScope;
Map<String, List<IFunctionSymbol>> nonstaticDfsMap;
getSymbolTable().pushScope();
try
{
getOwner().newDfsDeclInSetByName();
gsClass.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), false );
nonstaticDfsMap = getOwner().getDfsDecls();
getOwner().newDfsDeclInSetByName();
}
finally
{
nonstaticScope = getSymbolTable().popScope();
}
return new ClassScopeCache( staticScope, staticDfsMap, nonstaticScope, nonstaticDfsMap );
}
private void popClassSymbols()
{
getSymbolTable().popScope();
getOwner().popParsingStaticMember();
getOwner().newDfsDeclInSetByName();
}
private void pushClassSymbols( boolean bStatic, ClassScopeCache classScopeCache )
{
getOwner().setDfsDeclInSetByName( bStatic ? classScopeCache.getStaticDfsMap() : classScopeCache.getNonstaticDfsMap() );
getSymbolTable().pushScope( bStatic ? classScopeCache.getStaticScope() : classScopeCache.getNonstaticScope() );
getOwner().pushParsingStaticMember( bStatic );
}
private void removeInitializerIfInProgram( VarStatement varStmt )
{
if( !(getGosuClass() instanceof IGosuProgram) || getOwner().isEditorParser() )
{
return;
}
ParseTree location = varStmt.getLocation();
List<IParseTree> children = location.getChildren();
int iChildCount = children.size();
if( iChildCount > 3 )
{
if( iChildCount > 4 )
{
if( !(children.get( 3 ).getParsedElement() instanceof NameInDeclaration) ) // this is another NameInDeclaration for the Property name, which can be null if the name was not specified after the 'as' clause
{
throw new IllegalStateException( "Expecting children: 1 for NameInDeclaration, 1 for the type, (maybe another NameInDeclaration if an alias property is declared), and 1 for the initializer" );
}
}
IParseTree initializerExpr = children.get( iChildCount -1 );
if( initializerExpr != null )
{
location.removeChild( initializerExpr );
}
}
}
private IGosuClassInternal parseInnerClassDefinition( Token t )
{
IGosuClassInternal enclosingGsClass = getClassStatement().getGosuClass();
verify( getClassStatement(), match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF );
String strInnerClass = t._strValue;
if( strInnerClass != null )
{
for( IGosuClass c : enclosingGsClass.getKnownInnerClassesWithoutCompiling().values() )
{
IGosuClassInternal innerClass = (IGosuClassInternal)c;
if( innerClass.getRelativeName().equals( strInnerClass ) )
{
int i = 0;
String relativeName = innerClass.getName();
while( innerClass.isDefinitionsCompiled() )
{
// The inner class is already definition-compiled, maybe this is a duplicate inner class...
String duplicate = relativeName + "_duplicate_" + i++;
innerClass = (IGosuClassInternal)TypeSystem.getByFullNameIfValid( duplicate );
if( innerClass == null )
{
return null;
}
}
new GosuClassParser( getOwner(), innerClass ).parseDefinitions( innerClass );
return innerClass;
}
}
}
return null;
}
private void parseEnumConstants( IGosuClassInternal gsClass,
ClassScopeCache scopeCache )
{
boolean bEnum = gsClass != null && gsClass.isEnum();
if( !bEnum )
{
return;
}
Set<String> constants = new HashSet<String>();
Token t = new Token();
boolean bConst;
do
{
bConst = false;
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( t, null, SourceCodeTokenizer.TT_WORD, true ) &&
!Keyword.isKeyword( t._strValue ) &&
match( t, SourceCodeTokenizer.TT_WORD ) )
{
parseEnumConstant( t._strValue, scopeCache, constants.contains( t._strValue ) );
setLocation(iOffset, iLineNum, iColumn);
constants.add( t._strValue );
popStatement();
bConst = true;
}
if( match( null, ';' ) )
{
break;
}
} while( bConst && match( null, ',' ) );
}
private void parseEnumConstant( String strIdentifier, ClassScopeCache scopeCache, boolean bIsDuplicate )
{
IGosuClassInternal gsClass = getGosuClass();
VarStatement varStmt = gsClass.getStaticField( strIdentifier );
if( bIsDuplicate )
{
VarStatement dup = new VarStatement();
dup.setSymbol( varStmt.getSymbol() );
dup.setModifierInfo( varStmt.getModifierInfo() );
dup.setParent( varStmt.getParent() );
dup.setScope( varStmt.getScope() );
varStmt = dup;
}
pushClassSymbols( true, scopeCache );
try
{
getOwner().parseNewExpressionOrAnnotation( gsClass, false, !match( null, null, '(', true ), null, -1 );
Expression asExpr = popExpression();
varStmt.setAsExpression( asExpr );
if( asExpr.hasParseExceptions() )
{
for( IParseIssue pe : asExpr.getParseExceptions() )
{
varStmt.addParseException( pe );
//noinspection ThrowableResultOfMethodCallIgnored
asExpr.removeParseException( pe.getMessageKey() );
}
}
varStmt.setScriptPart( getOwner().getScriptPart() );
pushStatement( varStmt );
ISymbol symbol = varStmt.getSymbol();
symbol.setType( gsClass );
varStmt.setType( gsClass );
varStmt.setEnumConstant( true );
varStmt.setDefinitionParsed( true );
//noinspection unchecked
scopeCache.getNonstaticScope().put( varStmt.getSymbol().getName(), varStmt.getSymbol() );
gsClass.getParseInfo().addMemberField(varStmt);
}
finally
{
popClassSymbols();
}
}
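/**
* Definition-parses a field ('var') declaration, reusing the VarStatement created during the
* declaration pass when one exists and handling static, scoped, and property-backed fields.
*/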
private VarStatement parseFieldDefn( IGosuClassInternal gsClass, boolean bStatic, ClassScopeCache scopeCache, ModifierInfo modifiers )
{
if( gsClass.isInterface() )
{
bStatic = true;
}
Token t = new Token();
String strIdentifier = "";
boolean bHasName;
if( bHasName = match( t, SourceCodeTokenizer.TT_WORD ) )
{
strIdentifier = t._strValue;
}
else
{
t._strValue = null;
}
getOwner().maybeEatNonDeclKeyword( bHasName, strIdentifier );
VarStatement varStmt;
boolean bOuterLocalDefined = findLocalInOuters( strIdentifier ) != null;
if( !bStatic )
{
varStmt = findMemberField( gsClass, strIdentifier );
if( varStmt == null )
{
// It might not be in the non-static map if it is a scoped variable
varStmt = findStaticMemberField( gsClass, strIdentifier );
if( varStmt != null )
{
bStatic = true;
}
}
}
else
{
varStmt = findStaticMemberField( gsClass, strIdentifier );
}
verifyOrWarn( varStmt, varStmt == null || !bOuterLocalDefined, false, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
if( !bStatic && varStmt != null && varStmt.isStatic() )
{
// Force static scope if the var is static. This is for scoped vars
bStatic = true;
}
pushClassSymbols( bStatic, scopeCache );
try
{
if( varStmt == null )
{
// This is for error conditions like vars appearing on enhancements
varStmt = new VarStatement();
getOwner().parseVarStatement( varStmt, t, false );
}
else
{
getOwner().parseVarStatement( varStmt, t, true );
}
if( bStatic )
{
//noinspection unchecked
scopeCache.getNonstaticScope().put( varStmt.getSymbol().getName(), varStmt.getSymbol() );
}
DynamicPropertySymbol dps = getOwner().parseVarPropertyClause( varStmt, varStmt.getIdentifierName(), varStmt.getType(), true );
if( dps != null )
{
verifyPropertiesAreSymmetric( true, dps.getGetterDfs(), dps, varStmt );
setStatic( bStatic, dps );
dps.addMemberSymbols( gsClass );
dps.updateAnnotations( modifiers.getAnnotations() );
}
// Consume optional trailing semi as part of the statement
match( null, ';' );
varStmt.getModifierInfo().setAnnotations( modifiers.getAnnotations() );
gsClass.getParseInfo().addMemberField(varStmt);
return varStmt;
}
finally
{
popClassSymbols();
}
}
private ISymbol findLocalInOuters( String strIdentifier )
{
if( (isParsingBlock() || getParsingAnonymousClass() != null) && !getOwner().isParsingAnnotation() )
{
return captureSymbol( getCurrentEnclosingGosuClass(), strIdentifier, null );
}
return null;
}
private VarStatement findMemberField( IGosuClassInternal gsClass, String name )
{
gsClass.compileDeclarationsIfNeeded();
return assignPossibleDuplicateField( name, gsClass.getParseInfo().getMemberFields() );
}
private VarStatement findStaticMemberField( IGosuClassInternal gsClass, String name )
{
gsClass.compileDeclarationsIfNeeded();
return assignPossibleDuplicateField( name, gsClass.getParseInfo().getStaticFields() );
}
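// If the named field is missing or already definition-parsed (which happens when duplicate fields
// are declared), fall back to the lowest-indexed "_duplicate_" entry that has not been parsed yet.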
private VarStatement assignPossibleDuplicateField( String name, Map<String, VarStatement> fields )
{
VarStatement varStmt = fields.get( name );
varStmt = assignPossibleDuplicateField( name, varStmt, fields );
return varStmt;
}
VarStatement assignPossibleDuplicateField( String name, VarStatement varStmt, Map<String, VarStatement> map )
{
VarStatement result = varStmt;
if( varStmt == null || varStmt.isDefinitionParsed() )
{
int iMin = Integer.MAX_VALUE;
for( String nameCsr : map.keySet() )
{
String strName = nameCsr;
if( strName.toLowerCase().contains( "_duplicate_" + name.toLowerCase() ) )
{
VarStatement stmtCsr = map.get( nameCsr );
if( !stmtCsr.isDefinitionParsed() )
{
int iIndex = Integer.parseInt( strName.substring( 0, strName.indexOf( '_' ) ) );
if( iIndex < iMin )
{
iMin = iIndex;
result = stmtCsr;
}
}
}
}
}
return result;
}
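/**
* Definition-parses a 'delegate' field, reusing the declaration-phase statement when it really is
* a DelegateStatement (it may instead be a conflicting plain var statement).
*/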
private DelegateStatement parseDelegateDefn( IGosuClassInternal gsClass, ClassScopeCache scopeCache, ModifierInfo modifiers )
{
Token t = new Token();
int iNameOffset = getTokenizer().getTokenStart();
boolean bHasName = match( t, SourceCodeTokenizer.TT_WORD );
String strIdentifier = t._strValue == null ? "" : t._strValue;
getOwner().maybeEatNonDeclKeyword( bHasName, strIdentifier );
String insensitiveIdentifier = strIdentifier;
VarStatement varStmt = gsClass.getMemberField( insensitiveIdentifier );
if( varStmt != null )
{
varStmt.setNameOffset( iNameOffset, strIdentifier );
}
pushClassSymbols( false, scopeCache );
try
{
// Need to ensure that the varStmt is indeed a delegate statement, because it might be a conflicting var stmt
DelegateStatement delStmt;
if( varStmt instanceof DelegateStatement )
{
delStmt = (DelegateStatement)varStmt;
}
else
{
delStmt = new DelegateStatement();
delStmt.setModifierInfo( modifiers );
}
if( varStmt == null )
{
// This is for error conditions like delegates appearing on enhancements
varStmt = new DelegateStatement();
varStmt.setModifierInfo( modifiers );
varStmt.setSymbol( new Symbol( strIdentifier, JavaTypes.OBJECT(), null ) );
verify( delStmt, !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_DELEGATES_CANNOT_BE_STATIC );
getOwner().parseDelegateStatement( delStmt, strIdentifier );
}
else
{
getOwner().parseDelegateStatement( delStmt, strIdentifier );
}
gsClass.getParseInfo().addMemberField(varStmt);
return delStmt;
}
finally
{
popClassSymbols();
}
}
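/**
* Folds a getter or setter function symbol into the property's DynamicPropertySymbol, creating the
* symbol if necessary and verifying that a getter or setter is not defined more than once.
*/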
DynamicPropertySymbol getOrCreateDynamicPropertySymbol(
ParsedElement parsedElement, ICompilableTypeInternal gsClass, DynamicFunctionSymbol dfs, boolean bGetter )
{
String strPropertyName = dfs.getDisplayName().substring( 1 );
ISymbol symbol = getSymbolTable().getSymbol( strPropertyName );
if( symbol != null && !dfs.getDisplayName().contains( symbol.getDisplayName() ) )
{
// Force case sensitivity, mainly to make overrides consistent
symbol = null;
}
DynamicPropertySymbol dps;
if( !(gsClass instanceof IGosuClass && ((IGosuClass)gsClass).isCompilingDefinitions()) &&
!verify( parsedElement, symbol == null || symbol instanceof DynamicPropertySymbol, Res.MSG_VARIABLE_ALREADY_DEFINED, strPropertyName ) )
{
return new DynamicPropertySymbol( dfs, bGetter );
}
if( symbol == null ||
(gsClass != null &&
gsClass.getMemberProperty( strPropertyName ) == null &&
gsClass.getStaticProperty( strPropertyName ) == null) )
{
dps = new DynamicPropertySymbol( dfs, bGetter );
dps.setClassMember( true );
if( symbol != null )
{
assert symbol instanceof DynamicPropertySymbol;
dps.setParent( (DynamicPropertySymbol)symbol );
}
return dps;
}
else if( !(symbol instanceof DynamicPropertySymbol) )
{
// Error already applied from declaration phase
return new DynamicPropertySymbol( dfs, bGetter );
}
assert symbol instanceof DynamicPropertySymbol;
dps = (DynamicPropertySymbol)symbol;
if( bGetter )
{
verify( parsedElement,
strPropertyName.equals( Keyword.KW_outer.getName() ) ||
dps.getImmediateGetterDfs() == null ||
dps.getImmediateGetterDfs() instanceof VarPropertyGetFunctionSymbol ||
dps.getImmediateGetterDfs().getValueDirectly() != null ||
dps.getImmediateGetterDfs() == dfs ||
(dps.getImmediateGetterDfs().isAbstract() && !dfs.isAbstract()) ||
(gsClass != null && gsClass.isInterface()),
Res.MSG_GETTER_FOR_PROPERTY_ALREADY_DEFINED,
strPropertyName );
if( parsedElement.hasParseException( Res.MSG_FUNCTION_ALREADY_DEFINED ) &&
parsedElement.hasParseException( Res.MSG_GETTER_FOR_PROPERTY_ALREADY_DEFINED ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
parsedElement.removeParseException( Res.MSG_FUNCTION_ALREADY_DEFINED );
}
dps.setGetterDfs( dfs );
}
else
{
verify( parsedElement,
dps.getImmediateSetterDfs() == null ||
dps.getImmediateSetterDfs() instanceof VarPropertySetFunctionSymbol ||
dps.getImmediateSetterDfs().getValueDirectly() != null ||
dps.getImmediateSetterDfs() == dfs ||
(dps.getImmediateSetterDfs().isAbstract() && !dfs.isAbstract()) ||
(gsClass != null && gsClass.isInterface()),
Res.MSG_SETTER_FOR_PROPERTY_ALREADY_DEFINED,
strPropertyName );
if( parsedElement.hasParseException( Res.MSG_FUNCTION_ALREADY_DEFINED ) &&
parsedElement.hasParseException( Res.MSG_SETTER_FOR_PROPERTY_ALREADY_DEFINED ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
parsedElement.removeParseException( Res.MSG_FUNCTION_ALREADY_DEFINED );
}
dps.setSetterDfs( dfs );
}
return dps;
}
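/**
* Definition-parses a constructor body: resolves the declaration-phase constructor symbol, parses
* an explicit super(...)/this(...) call or synthesizes the implicit super-constructor call, and
* then parses the remaining statements.
*/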
@SuppressWarnings({"ConstantConditions"})
private FunctionStatement parseBaseConstructorDefinition( boolean bConstructor, List<IGosuAnnotation> defnAnnotations, ClassScopeCache scopeCache )
{
final IGosuClassInternal gsClass = getGosuClass();
Token T = new Token();
getSymbolTable().pushScope();
try
{
String strFunctionName;
if( bConstructor )
{
strFunctionName = gsClass.getRelativeName();
}
else
{
match( T, SourceCodeTokenizer.TT_WORD );
strFunctionName = T._strValue;
}
// String strNameInSource = T._strValue == null ? "" : T._strValue;
// getOwner().addNameInDeclaration( strFunctionName, iOffsetName-9, iLineName, iColumnName, true );
// Since we're going with a two-pass approach the symbols will already be in the table, but w/o values.
// So we don't want to check for already-defined functions here -- we're going to overwrite them with
// these identical symbols, but with values.
//verify( _symTable.getSymbol( strFunctionName ) == null, strFunctionName + Res.MSG_VARIABLE_ALREADY_DEFINED ) );
match( null, '(' );
List<ISymbol> args;
IType[] argTypes;
FunctionStatement functionStmt = new ConstructorStatement( bConstructor );
int iOffsetParamList = getTokenizer().getTokenStart();
int iColumnParamList = getTokenizer().getTokenColumn();
int iLineParamList = getTokenizer().getLineNumber();
if( !match( null, null, ')', true ) )
{
pushClassSymbols( false, scopeCache );
try
{
args = getOwner().parseParameterDeclarationList( functionStmt, false, null );
}
finally
{
popClassSymbols();
}
argTypes = new IType[args.size()];
for( int i = 0; i < args.size(); i++ )
{
getSymbolTable().putSymbol( args.get( i ) );
argTypes[i] = args.get( i ).getType();
}
}
else
{
argTypes = IType.EMPTY_ARRAY;
args = Collections.emptyList();
pushExpression( new ParameterListClause() );
setLocation( iOffsetParamList, iLineParamList, iColumnParamList, getTokenizer().getTokenStart() <= iOffsetParamList, true );
popExpression();
}
match( null, ')' );
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
getOwner().parseTypeLiteral();
Expression expression = popExpression();
verify( expression, false, Res.MSG_NO_TYPE_AFTER_CONSTRUCTOR );
}
StatementList stmtList;
int iOffset = getOwner().getTokenizer().getTokenStart();
int iLineNum = getOwner().getTokenizer().getLineNumber();
int iColumn = getOwner().getTokenizer().getTokenColumn();
FunctionType ft = new FunctionType( gsClass.getRelativeName(), gsClass, argTypes );
ft.setScriptPart( getOwner().getScriptPart() );
getOwner().pushParsingFunction( ft );
DynamicFunctionSymbol dfsDecl = findConstructorFunction( gsClass, DynamicFunctionSymbol.getSignatureName( strFunctionName, args ) );
dfsDecl = (dfsDecl == null || dfsDecl.getType() == GosuTypes.DEF_CTOR_TYPE()) ? null : dfsDecl;
functionStmt = dfsDecl == null ? functionStmt : dfsDecl.getDeclFunctionStmt();
verify( functionStmt, dfsDecl != null, Res.MSG_EXPECTING_NAME_FUNCTION_DEF );
if( verify( functionStmt, match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CONSTRUCTOR_DEF ) )
{
IGosuClassInternal superClass = gsClass.getSuperClass();
if( superClass != null )
{
if( gsClass.isAnonymous() )
{
List<? extends IConstructorInfo> declaredConstructors = gsClass.getTypeInfo().getDeclaredConstructors();
if( verifyCallSiteCtorImpled( functionStmt, declaredConstructors ) )
{
verify( functionStmt, declaredConstructors.size() <= 1, Res.MSG_SINGLE_ANON_CTOR );
}
}
// If it's an enum, there's no default super constructor: the enum class extends the Enum java class
// which requires a String and an int. Those arguments are automatically generated by the compiler.
if( gsClass.getSupertype().getGenericType() != JavaTypes.ENUM() )
{
DynamicFunctionSymbol superDefaultConstructor = superClass.getDefaultConstructor();
verify( functionStmt,
match( T, Keyword.KW_super, true ) ||
match( T, Keyword.KW_this, true ) ||
(superDefaultConstructor != null && superClass.isAccessible( getGosuClass(), superDefaultConstructor )),
Res.MSG_NO_DEFAULT_CTOR_IN, superClass.getName() );
}
}
else if( gsClass.isAnonymous() ) // anon on interface
{
if( verify( functionStmt, gsClass.getTypeInfo().getDeclaredConstructors().size() <= 1, Res.MSG_SINGLE_ANON_CTOR ) )
{
verify( functionStmt, argTypes.length == 0, Res.MSG_ANON_CTOR_PARAMS_CONFLICT_WITH_CALL_SITE );
}
}
// No need to push an isolated scope here because there are no indexed
// symbols involved. This scope is only to resolve relative constructor
// calls from within a constructor, e.g., this( foo ), super( foo ), etc.
boolean bMoreStatements = true;
MethodCallStatement initializer = null;
boolean bSuperOrThisCall = (match( T, Keyword.KW_super, true ) || match( T, Keyword.KW_this, true )) && getTokenizer().lookaheadType( 1, true ) == '(';
if( bSuperOrThisCall )
{
// Has to be static scope here since the JVM verifier prevents explicitly passing 'this' to super()
pushClassSymbols( true, scopeCache );
try
{
putSuperAndThisConstructorSymbols();
// Push static class members in case they are referenced as args in super( xxx ) or this( xxx )
bMoreStatements = getOwner().parseStatement();
initializer = (MethodCallStatement)popStatement();
}
finally
{
popClassSymbols();
}
}
else if( superClass != null )
{
MethodCallExpression e = new MethodCallExpression();
e.setParent( getClassStatement() );
DynamicFunctionSymbol defaultSuperConstructor;
// Enums implicitly call a super function that takes a String and an int, not a no-arg method
if( gsClass.getSupertype().getGenericType() == JavaTypes.ENUM() )
{
defaultSuperConstructor = superClass.getConstructorFunction( "Enum(java.lang.String, int)" );
}
else
{
defaultSuperConstructor = superClass.getDefaultConstructor();
}
if( defaultSuperConstructor != null )
{
e.setFunctionSymbol( new SuperConstructorFunctionSymbol( defaultSuperConstructor ) );
e.setArgs( null );
e.setType( GosuParserTypes.NULL_TYPE() );
initializer = new MethodCallStatement();
initializer.setMethodCall( e );
e.setParent( initializer );
initializer.setParent( functionStmt );
}
}
else
{
MethodCallExpression e = new MethodCallExpression();
e.setParent( getClassStatement() );
e.setFunctionSymbol( new InitConstructorFunctionSymbol( getSymbolTable() ) );
e.setArgs( null );
e.setType( GosuParserTypes.NULL_TYPE() );
initializer = new MethodCallStatement();
initializer.setMethodCall( e );
e.setParent( initializer );
initializer.setParent( functionStmt );
}
ArrayList<Statement> statements = new ArrayList<Statement>( 8 );
if( bMoreStatements )
{
pushClassSymbols( false, scopeCache );
getOwner().pushParsingAbstractConstructor( getClassStatement().getGosuClass().isAbstract() );
getSymbolTable().pushScope();
try
{
getSymbolTable().putSymbol( new Symbol( Keyword.KW_this.getName(), TypeLord.getConcreteType( gsClass ), getSymbolTable(), null ) );
getSymbolTable().putSymbol( new Symbol( Keyword.KW_super.getName(),
superClass == null ? IGosuClassInternal.Util.getGosuClassFrom( JavaTypes.OBJECT() ) :
superClass, getSymbolTable(), null ) );
getOwner().parseStatementsAndDetectUnreachable( statements );
}
finally
{
getSymbolTable().popScope();
getOwner().popParsingAbstractConstructor();
popClassSymbols();
}
}
verify( functionStmt, match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_CONSTRUCTOR_DEF );
stmtList = new StatementList( getSymbolTable() );
stmtList.setStatements( statements );
Statement statement = isDontOptimizeStatementLists() ? stmtList : stmtList.getSelfOrSingleStatement();
if( statement == stmtList )
{
pushStatement( statement );
setLocation( iOffset, iLineNum, iColumn );
popStatement();
}
if( dfsDecl != null )
{
dfsDecl.setArgs( args );
dfsDecl.setValueDirectly( statement );
dfsDecl.setInitializer( initializer );
dfsDecl.getModifierInfo().setAnnotations( defnAnnotations );
}
}
else
{
eatStatementBlock( functionStmt, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
NotAStatement nas = new NotAStatement();
pushStatement( nas );
setLocation( iOffset, iLineNum, iColumn );
popStatement();
if( dfsDecl != null )
{
dfsDecl.setArgs( args );
dfsDecl.setValueDirectly( nas );
dfsDecl.getModifierInfo().setAnnotations( defnAnnotations );
}
}
getOwner().pushDynamicFunctionSymbol( dfsDecl );
if( functionStmt != null )
{
functionStmt.setDynamicFunctionSymbol( dfsDecl );
pushStatement( functionStmt );
}
return functionStmt;
}
finally
{
getSymbolTable().popScope();
if( getOwner().isParsingFunction() )
{
getOwner().popParsingFunction();
}
}
}
private boolean verifyCallSiteCtorImpled( FunctionStatement functionStmt, List<? extends IConstructorInfo> declaredConstructors )
{
if( declaredConstructors.size() != 2 )
{
return true;
}
for( IConstructorInfo ctor: declaredConstructors )
{
if( ctor instanceof GosuConstructorInfo )
{
if( !verify( functionStmt, !((GosuConstructorInfo)ctor).getDfs().getType().getName().equals( GosuTypes.DEF_CTOR_TYPE().getName() ), Res.MSG_ANON_CTOR_PARAMS_CONFLICT_WITH_CALL_SITE ) )
{
// The ctor from the call site is on super, but not implemented by this ctor, therefore it implements the wrong one
return false;
}
}
}
return true;
}
private DynamicFunctionSymbol findConstructorFunction( IGosuClassInternal gsClass, String signatureName )
{
gsClass.compileDeclarationsIfNeeded();
DynamicFunctionSymbol dfs = gsClass.getParseInfo().getConstructorFunctions().get( signatureName );
if( dfs != null && dfs.getValueDirectly() != null )
{
dfs = GosuParser.assignPossibleDuplicateDfs( dfs, gsClass.getParseInfo().getConstructorFunctions().values() );
}
return dfs;
}
/**
* Alias super's ctors and this class's ctors as super(xxx) and this(xxx).
*/
private void putSuperAndThisConstructorSymbols()
{
IGosuClassInternal thisClass = getGosuClass();
IGosuClassInternal superClass = thisClass.getSuperClass();
if( superClass != null )
{
for( DynamicFunctionSymbol dfs : superClass.getConstructorFunctions() )
{
if( superClass.isAccessible( getGosuClass(), dfs ) )
{
dfs = new SuperConstructorFunctionSymbol( superClass.isParameterizedType()
? dfs.getParameterizedVersion( superClass )
: dfs );
getSymbolTable().putSymbol( dfs );
getOwner().putDfsDeclInSetByName( dfs );
}
}
}
for( DynamicFunctionSymbol dfs : thisClass.getConstructorFunctions() )
{
dfs = new ThisConstructorFunctionSymbol( dfs );
getSymbolTable().putSymbol( dfs );
getOwner().putDfsDeclInSetByName( dfs );
}
}
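/**
* Detects inheritance cycles by walking superclasses, enclosing types, and interfaces back to gsClass.
*/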
private boolean isCyclicInheritance( IType superType, IGosuClassInternal gsClass )
{
if( TypeLord.getPureGenericType( superType ) == gsClass )
{
return true;
}
if( superType != null && superType instanceof IGosuClassInternal )
{
if( isCyclicInheritance( ((IGosuClassInternal)superType).getSuperClass(), gsClass ) )
{
return true;
}
if( isCyclicInheritance( ((IGosuClassInternal)superType).getEnclosingType(), gsClass ) )
{
return true;
}
}
return superType instanceof IGosuClassInternal &&
isCyclicInterfaceInheritance( (IGosuClassInternal)superType, gsClass );
}
private boolean isCyclicInterfaceInheritance( IGosuClassInternal gsExtendee, IGosuClass gsExtendor )
{
if( gsExtendee == gsExtendor )
{
return true;
}
IType[] interfaces = gsExtendee.getInterfaces();
for( int i = 0; i < interfaces.length; i++ )
{
IType type = interfaces[i];
if( type instanceof ErrorType )
{
return false;
}
IGosuClassInternal gsClass = IGosuClassInternal.Util.getGosuClassFrom( type );
if( isCyclicInterfaceInheritance( gsClass, gsExtendor ) )
{
return true;
}
}
return false;
}
@Override
IGosuClassInternal getGosuClass()
{
return (IGosuClassInternal)super.getGosuClass();
}
@Override
public String toString()
{
IGosuClassInternal gosuClass = getGosuClass();
return "Parsing Class: " + (gosuClass == null ? "null" : gosuClass.getName());
}
}
| gosu-core/src/main/java/gw/internal/gosu/parser/GosuClassParser.java | /*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser;
import gw.internal.gosu.ir.transform.util.IRTypeResolver;
import gw.internal.gosu.parser.expressions.BlockExpression;
import gw.internal.gosu.parser.expressions.ClassDeclaration;
import gw.internal.gosu.parser.expressions.InterfacesClause;
import gw.internal.gosu.parser.expressions.MethodCallExpression;
import gw.internal.gosu.parser.expressions.NameInDeclaration;
import gw.internal.gosu.parser.expressions.NullExpression;
import gw.internal.gosu.parser.expressions.ParameterListClause;
import gw.internal.gosu.parser.expressions.SuperTypeClause;
import gw.internal.gosu.parser.expressions.TypeLiteral;
import gw.internal.gosu.parser.expressions.TypeVariableDefinitionImpl;
import gw.internal.gosu.parser.statements.ClassStatement;
import gw.internal.gosu.parser.statements.ConstructorStatement;
import gw.internal.gosu.parser.statements.DelegateStatement;
import gw.internal.gosu.parser.statements.FunctionStatement;
import gw.internal.gosu.parser.statements.MethodCallStatement;
import gw.internal.gosu.parser.statements.NamespaceStatement;
import gw.internal.gosu.parser.statements.NoOpStatement;
import gw.internal.gosu.parser.statements.NotAStatement;
import gw.internal.gosu.parser.statements.PropertyStatement;
import gw.internal.gosu.parser.statements.ReturnStatement;
import gw.internal.gosu.parser.statements.StatementList;
import gw.internal.gosu.parser.statements.UsesStatement;
import gw.internal.gosu.parser.statements.VarInitializationVerifier;
import gw.internal.gosu.parser.statements.VarStatement;
import gw.lang.annotation.UsageTarget;
import gw.lang.ir.IRType;
import gw.lang.parser.GlobalScope;
import gw.lang.parser.GosuParserTypes;
import gw.lang.parser.IBlockClass;
import gw.lang.parser.IDynamicFunctionSymbol;
import gw.lang.parser.IFunctionSymbol;
import gw.lang.parser.IParseIssue;
import gw.lang.parser.IParseTree;
import gw.lang.parser.IParsedElement;
import gw.lang.parser.IParsedElementWithAtLeastOneDeclaration;
import gw.lang.parser.IParserState;
import gw.lang.parser.IReducedDynamicFunctionSymbol;
import gw.lang.parser.IScope;
import gw.lang.parser.ISymbol;
import gw.lang.parser.ISymbolTable;
import gw.lang.parser.IToken;
import gw.lang.parser.ITokenizerOffsetMarker;
import gw.lang.parser.ITypeUsesMap;
import gw.lang.parser.Keyword;
import gw.lang.parser.ScriptPartId;
import gw.lang.parser.exceptions.NotImplementedParseException;
import gw.lang.parser.exceptions.ObsoleteConstructorWarning;
import gw.lang.parser.exceptions.ParseException;
import gw.lang.parser.exceptions.ParseIssue;
import gw.lang.parser.exceptions.ParseResultsException;
import gw.lang.parser.expressions.IMemberAccessExpression;
import gw.lang.parser.expressions.IModifierListClause;
import gw.lang.parser.expressions.IParameterDeclaration;
import gw.lang.parser.expressions.ITypeVariableDefinition;
import gw.lang.parser.expressions.ITypeVariableDefinitionExpression;
import gw.lang.parser.expressions.Variance;
import gw.lang.parser.resources.Res;
import gw.lang.parser.resources.ResourceKey;
import gw.lang.parser.statements.IClassStatement;
import gw.lang.parser.statements.IFunctionStatement;
import gw.lang.parser.statements.ITerminalStatement;
import gw.lang.parser.statements.IUsesStatementList;
import gw.lang.reflect.FunctionType;
import gw.lang.reflect.IConstructorInfo;
import gw.lang.reflect.IEnhanceableType;
import gw.lang.reflect.IErrorType;
import gw.lang.reflect.IFeatureInfo;
import gw.lang.reflect.IFunctionType;
import gw.lang.reflect.IInvocableType;
import gw.lang.reflect.IMethodInfo;
import gw.lang.reflect.IParameterInfo;
import gw.lang.reflect.IPropertyInfo;
import gw.lang.reflect.IRelativeTypeInfo;
import gw.lang.reflect.IType;
import gw.lang.reflect.ITypeInfo;
import gw.lang.reflect.ITypeVariableType;
import gw.lang.reflect.MethodList;
import gw.lang.reflect.Modifier;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.gs.ClassType;
import gw.lang.reflect.gs.IGenericTypeVariable;
import gw.lang.reflect.gs.IGosuClass;
import gw.lang.reflect.gs.IGosuClassParser;
import gw.lang.reflect.gs.IGosuEnhancement;
import gw.lang.reflect.gs.IGosuMethodInfo;
import gw.lang.reflect.gs.IGosuProgram;
import gw.lang.reflect.gs.ISourceFileHandle;
import gw.lang.reflect.gs.StringSourceFileHandle;
import gw.lang.reflect.java.GosuTypes;
import gw.lang.reflect.java.IJavaType;
import gw.lang.reflect.java.JavaTypes;
import gw.util.DynamicArray;
import gw.util.GosuExceptionUtil;
import gw.util.GosuObjectUtil;
import gw.util.GosuStringUtil;
import gw.util.Stack;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
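* Parses Gosu class source in phases: the class header, member declarations, and member
* definitions, including inner classes, enhancements, and programs.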
*/
@SuppressWarnings({"ThrowableInstanceNeverThrown"})
public class GosuClassParser extends ParserBase implements IGosuClassParser, ITokenizerOffsetMarker
{
private int _iClassOffset;
private int _iClassLineNum;
private int _iClassColumn;
private ClassStatement _classStmt;
private Stack<IGosuClassInternal> _innerClasses;
private int _innerClassOffset;
public GosuClassParser( GosuParser owner )
{
super( owner );
_innerClasses = new Stack<IGosuClassInternal>();
}
//## todo: maybe ctors should set the class here so that subsequent calls to parseXxx() don't need to take an IGosuClass
private GosuClassParser( GosuParser owner, IGosuClassInternal innerClass )
{
super( owner );
int mark = ((InnerClassFileSystemSourceFileHandle)innerClass.getSourceFileHandle()).getMark();
if( mark >= 0 )
{
getTokenizer().restoreToMark( mark );
}
else
{
goToPosition( innerClass.getSourceFileHandle().getOffset() );
}
_innerClassOffset = getTokenizer().mark();
_innerClasses = new Stack<IGosuClassInternal>();
}
public static void parseAnonymousInnerClass( GosuParser gosuParser, IGosuClassInternal innerGsClass )
{
Stack<BlockExpression> enclosingBlocks = gosuParser._blocks;
gosuParser.setBlocks( null );
Map<String, List<IFunctionSymbol>> restoreDfsDecls = copyDFSDecls( gosuParser );
try
{
new GosuClassParser( gosuParser, innerGsClass ).parseHeader(innerGsClass, false, true, true );
new GosuClassParser( gosuParser, innerGsClass ).parseDeclarations( innerGsClass );
if( !gosuParser.getContextType().isMethodScoring() )
{
new GosuClassParser( gosuParser, innerGsClass ).parseDefinitions( innerGsClass );
}
}
finally
{
gosuParser.setDfsDeclInSetByName( restoreDfsDecls );
gosuParser.setBlocks( enclosingBlocks );
}
}
@Override
protected String getScript()
{
return getOwner().getScript();
}
@Override
public int getLineNumShift()
{
return getOwner().getLineNumShift();
}
@Override
public int getOffsetShift()
{
return getOwner().getOffsetShift();
}
@Override
public int getOffsetMark()
{
if( isInnerClass( getGosuClass() ) )
{
return _innerClassOffset;
}
return -1;
}
@Override
public ClassStatement getClassStatement()
{
return _classStmt;
}
private void setClassStatement( ClassStatement classStmt )
{
if( classStmt == null )
{
throw new IllegalArgumentException( "Class stmt is null" );
}
_classStmt = classStmt;
}
private IGosuClassInternal getCurrentInnerClass()
{
return _innerClasses.isEmpty() ? null : _innerClasses.peek();
}
private void pushInnerClass( IGosuClassInternal gsInnerClass )
{
_innerClasses.push( gsInnerClass );
}
private IGosuClassInternal popInnerClass( IGosuClassInternal gsInnerClass )
{
IGosuClassInternal top = _innerClasses.pop();
if( top != gsInnerClass )
{
throw new IllegalStateException( "Unbalanced push/pop for inner classes" );
}
return top;
}
private boolean isInnerClassesEmpty()
{
return _innerClasses.isEmpty();
}
/**
* Parses all declarations including:<br>
* <ul>
* <li> Fields
* <li> Methods
* <li> Properties
* <li> Inner types, recursively
* </ul>
*/
public void parseDeclarations( IGosuClass gsCls )
{
IGosuClassInternal gsClass = (IGosuClassInternal)gsCls;
if( gsClass.isDeclarationsCompiled() )
{
if( !gsClass.isInnerDeclarationsCompiled() )
{
if( parseDeclarationsOfLeftOverInnerClasses( gsClass ) )
{
gsClass.setInnerDeclarationsCompiled();
}
}
return;
}
boolean bPushedScope = pushScopeIfNeeded( gsClass );
getTokenizer().pushOffsetMarker( this );
ScriptPartId scriptPartId = new ScriptPartId( gsClass, null );
getOwner().pushScriptPart( scriptPartId );
GosuClassCompilingStack.pushCompilingType( gsClass );
gsClass.setCompilingDeclarations( true );
try
{
ClassStatement classStmt = (ClassStatement)gsClass.getClassStatement();
try
{
setClassStatement( classStmt );
}
catch( Exception e )
{
throw GosuExceptionUtil.forceThrow( e, gsClass.getName() );
}
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
classStmt.getClassFileStatement().clearParseTreeInformation();
}
// Don't need an isolated scope here because class members are all dynamic
// and, therefore, don't have to be indexed wrt an isolated scope.
getSymbolTable().pushScope();
try
{
//## todo: reparsing header with annotations this time, any chance we can do that the first time we parse the header, so we can avoid doing it twice?
String strClassName = parseHeader(gsClass, false, false, true);
if( gsClass instanceof IGosuEnhancementInternal )
{
parseEnhancementBodyDecl( gsClass );
}
else
{
parseClassBodyDecl( strClassName, gsClass );
}
}
finally
{
getSymbolTable().popScope();
pushStatement( classStmt );
setLocation( _iClassOffset, _iClassLineNum, _iClassColumn, true );
popStatement();
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
pushStatement( classStmt.getClassFileStatement() );
setLocation( 0, 1, _iClassColumn, true );
popStatement();
}
}
classStmt.compactParseTree();
}
finally
{
gsClass.setCompilingDeclarations( false );
// Do not set decls compiled; we do that in parseClassBodyDecl(). Also the decls may not have actually been compiled
//gsClass.setDeclarationsCompiled();
GosuClassCompilingStack.popCompilingType();
getOwner().popScriptPart( scriptPartId );
popScopeIfNeeded( bPushedScope, gsClass );
getTokenizer().popOffsetMarker( this );
removeTypeVarsFromParserMap( gsClass );
}
}
private boolean isTopLevelClass( IGosuClassInternal gsClass )
{
return gsClass.getEnclosingType() == null;
}
// /**
// * Extend the bounds of the enclosing ClassFileStatement if need be. Note this is only necessary when
// * the enclosing class has errors and, therefore, may not have parsed elements with the
// */
// private void extendEnclosingClassFileBounds( IParsedElement enclosingClassFileStmt )
// {
// if( enclosingClassFileStmt.getLocation() != null )
// {
// int iExtentDelta = enclosingClassFileStmt.getLocation().getExtent() - getClassStatement().getClassFileStatement().getLocation().getExtent();
// if( iExtentDelta < 0 )
// {
// enclosingClassFileStmt.getLocation().setLength( enclosingClassFileStmt.getLocation().getLength() + -iExtentDelta );
// }
// }
// }
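/**
* Definition (final) pass: resets the tokenizer and re-parses the entire class body to produce
* statement trees for every member, then verifies the results and records any parse exceptions
* on the class.
*/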
public void parseDefinitions( IGosuClass gsCls )
{
IGosuClassInternal gsClass = (IGosuClassInternal)gsCls;
getTokenizer().pushOffsetMarker( this );
boolean bPushedScope = pushScopeIfNeeded( gsClass );
gsClass.setCompilingDefinitions( true );
GosuClassParseInfo parseInfo = gsClass.getParseInfo();
ClassStatement classStmt = parseInfo.getClassStatement();
setClassStatement( classStmt );
clearParseTree( gsClass );
ScriptPartId scriptPartId = new ScriptPartId( gsClass, null );
getOwner().pushScriptPart( scriptPartId );
GosuClassCompilingStack.pushCompilingType( gsClass );
getOwner()._iReturnOk++;
if( isDeprecated( (ModifierInfo)gsCls.getModifierInfo() ) )
{
getOwner().pushIgnoreTypeDeprecation();
}
try
{
try
{
if( !gsClass.isDefinitionsCompiled() )
{
// Don't need an isolated scope here because class members are all dynamic
// and, therefore, don't have to be indexed wrt an isolated scope.
getSymbolTable().pushScope();
try
{
//
// Reset the tokenizer to prepare for the second... er, third pass
//
getTokenizer().reset();
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
getLocationsList().clear();
}
else
{
removeInnerClassDeclarationsFromLocationsList( gsClass );
}
//
// Parse the whole class, including inner types
//
// Note function definitions are parsed as no-op statements, but are
// pushed onto the dynamic function symbol stack.
//## todo: do we really need to parse the header *again* (maybe for annotations?)
parseHeader(gsClass, false, false, true );
if( gsClass instanceof IGosuEnhancementInternal )
{
parseClassStatementAsEnhancement( gsClass );
}
else
{
parseClassStatement();
}
}
finally
{
getSymbolTable().popScope();
if( gsClass instanceof IGosuProgramInternal )
{
((IGosuProgramInternal)gsClass).setParsingExecutableProgramStatements( true );
try
{
FunctionStatement fs = parseExecutableProgramStatements( (IGosuProgramInternal)gsClass );
makeExprRootFunction( (IGosuProgramInternal)gsClass, fs );
}
finally
{
((IGosuProgramInternal)gsClass).setParsingExecutableProgramStatements( false );
}
}
boolean b = isInnerClass( gsClass ) || match( null, SourceCodeTokenizer.TT_EOF );
if( !verify( classStmt, b, Res.MSG_END_OF_STMT ) )
{
consumeTrailingTokens();
}
gsClass.setDefinitionsCompiled();
}
}
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
getOwner().setParsed( true );
}
}
finally
{
pushStatement( classStmt );
setLocation( _iClassOffset, _iClassLineNum, _iClassColumn, true );
if( isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass ) )
{
popStatement();
pushStatement( classStmt.getClassFileStatement() );
setLocation( 0, 1, _iClassColumn, true );
popStatement();
}
assignTokens( classStmt );
}
try
{
verifyParsedElement( isInnerClass( gsClass ) && !TypeLord.isEvalProgram( gsClass ) ? classStmt : classStmt.getClassFileStatement() );
}
catch( ParseResultsException pre )
{
gsClass.setParseResultsException( pre );
}
}
finally
{
try
{
gsClass.setCompilingDefinitions( false );
gsClass.setDefinitionsCompiled();
getOwner().popScriptPart( scriptPartId );
}
finally
{
GosuClassCompilingStack.popCompilingType();
}
popScopeIfNeeded( bPushedScope, gsClass );
getTokenizer().popOffsetMarker( this );
removeTypeVarsFromParserMap( gsClass );
getOwner()._iReturnOk--;
pushStatement( _classStmt.getClassFileStatement() );
setLocation( 0, 1, _iClassColumn, true );
popStatement();
if( isDeprecated( (ModifierInfo)gsCls.getModifierInfo() ) )
{
getOwner().popIgnoreTypeDeprecation();
}
gsClass.syncGenericAndParameterizedClasses();
getOwner().clearDfsStack();
_classStmt = null;
VarInitializationVerifier.verifyFinalFields( gsClass );
VarInitializationVerifier.verifyLocalVars( gsClass, true );
if( isTopLevelClass( gsClass ) )
{
postDefinitionVerify(classStmt);
}
}
}
private void postDefinitionVerify( IClassStatement classStmt )
{
if( classStmt == null )
{
return;
}
IGosuClass gsClass = classStmt.getGosuClass();
if( gsClass.isAnonymous() || gsClass instanceof IBlockClass )
{
return;
}
CompileTimeAnnotationHandler.postDefinitionVerification( classStmt );
for( IGosuClass innerClass: classStmt.getGosuClass().getInnerClasses() )
{
postDefinitionVerify( innerClass.getClassStatement() );
}
}
private void removeInnerClassDeclarationsFromLocationsList( IGosuClassInternal gsClass )
{
List<ParseTree> locations = getLocationsList();
for( int i = locations.size()-1; i >= 0; i-- )
{
ParseTree csr = locations.get( i );
if( csr.getScriptPartId().getContainingType() == gsClass )
{
IParseTree parent = csr.getParent();
if( parent != null )
{
parent.removeChild( csr );
}
locations.remove( csr );
}
else
{
break;
}
}
}
private void consumeTrailingTokens()
{
while( !match( null, SourceCodeTokenizer.TT_EOF ) )
{
getTokenizer().nextToken();
}
}
private void assignTokens( ClassStatement classStmt )
{
if( !getOwner().isEditorParser() )
{
return;
}
if( !isTopLevelClass( classStmt.getGosuClass() ) )
{
return;
}
List<Token> tokens = getOwner().getTokenizer().getTokens().toList();
classStmt.getClassFileStatement().assignTokens( tokens );
//## todo: handle programs (see GosuAstTransformer)
// String strSource = getGosuClass().getSource();
// String strTextFromParseTree = classStmt.getClassFileStatement().getLocation().getTextFromTokens();
// if( !strSource.equals( strTextFromParseTree ) )
// {
// int[] diff = getDiffOffset( strSource, strTextFromParseTree );
//
// throw new IllegalStateException( buildInconsistentParseErrorMessage( strSource, strTextFromParseTree, diff ) );
// }
//noinspection LoopStatementThatDoesntLoop
for( IToken token : tokens )
{
throw new IllegalStateException( "One or more tokens were not assigned: " + token );
}
}
private String buildInconsistentParseErrorMessage( String strSource, String strTextFromParseTree, int[] diff )
{
return
"Parsed class, " + getGosuClass().getName() + ", inconsistent with source.\n" +
"Line: " + diff[1] + " Offset: " + diff[0] + "\n" +
"*** Parsed Version ***\n" +
ParseIssue.makeContextString( diff[1], strTextFromParseTree, diff[2] ) + "\n" +
"*** Source Version ***\n" +
ParseIssue.makeContextString( diff[1], strSource, diff[2] ) + "\n";
}
private int[] getDiffOffset( String strSource, String strTextFromParseTree )
{
if( strSource == null || strTextFromParseTree == null )
{
return null;
}
int i;
int iLineOffset = 0;
int iLine = 0;
for( i = 0; i < strSource.length(); i++ )
{
if( i >= strTextFromParseTree.length() )
{
return new int[] {i, iLine, iLineOffset};
}
char sourceChar = strSource.charAt( i );
char parserChar = strTextFromParseTree.charAt( i );
if( sourceChar != parserChar )
{
return new int[] {i, iLine, iLineOffset};
}
if( parserChar == '\n' )
{
iLine++;
iLineOffset = i;
}
}
return new int[] {i, iLine, iLineOffset};
}
private void clearParseTree( IGosuClassInternal gsClass )
{
if( (!(gsClass instanceof IGosuProgram) && isTopLevelClass( gsClass )) ||
TypeLord.isEvalProgram( gsClass ) )
{
gsClass.getClassStatement().getClassFileStatement().clearParseTreeInformation();
}
else
{
gsClass.getClassStatement().clearParseTreeInformation();
if( gsClass.isAnonymous() )
{
//noinspection SuspiciousMethodCalls
if( !getLocationsList().isEmpty() )
{
ParseTree last = getLocationsList().get( getLocationsList().size() - 1 );
if( last.getParsedElement() == null )
{
// Remove abandoned class-stmt parse tree from decl parse
getLocationsList().remove( last );
}
}
}
}
}
private boolean isInnerClass( IGosuClassInternal gsClass )
{
return gsClass.getEnclosingType() != null;
}
private FunctionStatement parseExecutableProgramStatements( IGosuProgramInternal gsClass )
{
List savedLocations = getOwner().getLocations();
getTokenizer().resetButKeepTokens();
getLocationsList().clear();
getOwner().setLocationsFromProgramClassParser( savedLocations );
parseHeader( gsClass, false, false, true );
gsClass.addCapturedProgramSymbols( getSymbolTable() );
FunctionStatement fs = parseProgramAsFunctionStatement( gsClass );
List newLocations = getOwner().getLocations();
removeRedundantUsesStatementList( newLocations );
getOwner().getLocationsList().clear();
getOwner().setLocationsFromProgramClassParser( null );
getOwner().getLocationsList().addAll( savedLocations );
getOwner().getLocationsList().addAll( newLocations );
return fs;
}
private void removeRedundantUsesStatementList( List newLocations )
{
for( int i = 0; i < newLocations.size(); i++ )
{
IParseTree pt = (IParseTree)newLocations.get( i );
if( pt.getParsedElement() instanceof IUsesStatementList )
{
newLocations.remove( i-- );
}
}
}
private void makeExprRootFunction( IGosuProgramInternal gsClass, FunctionStatement callableStmt )
{
DynamicFunctionSymbol dfsDecl = getProgramRootExprValueDfs();
if( dfsDecl != null )
{
getOwner().putDfsDeclInSetByName( dfsDecl );
StatementList stmtList = makeReturnStatementWithExprRoot( gsClass, callableStmt );
if( stmtList != null )
{
FunctionStatement fs = new FunctionStatement();
fs.setDynamicFunctionSymbol( dfsDecl );
dfsDecl.setValueDirectly( stmtList );
getOwner().pushDynamicFunctionSymbol( dfsDecl );
fs.setDynamicFunctionSymbol( dfsDecl );
dfsDecl.setClassMember( true );
gsClass.getParseInfo().addMemberFunction(dfsDecl);
}
}
}
private StatementList makeReturnStatementWithExprRoot( IGosuProgramInternal gsClass, FunctionStatement callableStmt )
{
Statement statement = (Statement)callableStmt.getDynamicFunctionSymbol().getValueDirectly();
if( statement != null )
{
boolean[] bAbsolute = {false};
ITerminalStatement significantTerminalStatement = statement.getLeastSignificantTerminalStatement( bAbsolute );
if( gsClass.isGenRootExprAccess() &&
bAbsolute[0] &&
significantTerminalStatement instanceof ReturnStatement &&
significantTerminalStatement.getParent() != null &&
significantTerminalStatement.getParent().getParent() == callableStmt )
{
ReturnStatement rs = (ReturnStatement)significantTerminalStatement;
Expression expr = rs.getValue();
if( expr instanceof IMemberAccessExpression )
{
Expression rootExpr = (Expression)((IMemberAccessExpression)expr).getRootExpression();
ReturnStatement defaultReturnStmt = new ReturnStatement();
defaultReturnStmt.setValue( rootExpr );
List<Statement> stmts = new ArrayList<Statement>( 2 );
stmts.add( defaultReturnStmt );
StatementList stmtList = new StatementList( getSymbolTable() );
stmtList.setStatements( stmts );
return stmtList;
}
}
}
ReturnStatement defaultReturnStmt = new ReturnStatement();
NullExpression nullExpr = new NullExpression();
nullExpr.setType( JavaTypes.OBJECT() );
defaultReturnStmt.setValue( nullExpr );
List<Statement> stmts = new ArrayList<Statement>( 2 );
stmts.add( defaultReturnStmt );
StatementList stmtList = new StatementList( getSymbolTable() );
stmtList.setStatements( stmts );
return stmtList;
}
private DynamicFunctionSymbol getProgramRootExprValueDfs()
{
for( IDynamicFunctionSymbol dfs : getGosuClass().getMemberFunctions() )
{
if( dfs.getName().contains( "evaluateRootExpr" ) )
{
return (DynamicFunctionSymbol)dfs;
}
}
return null;
}
private FunctionStatement parseProgramAsFunctionStatement( IGosuClassInternal gsClass )
{
// Copy the Non-Static Scope so we can reuse it for each member
//
IScope nonstaticScope;
Map<String, List<IFunctionSymbol>> nonstaticDfsMap;
getSymbolTable().pushScope();
try
{
getOwner().newDfsDeclInSetByName();
gsClass.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), false );
nonstaticDfsMap = getOwner().getDfsDecls();
getOwner().newDfsDeclInSetByName();
}
finally
{
nonstaticScope = getSymbolTable().popScope();
}
getSymbolTable().pushScope();
getOwner().newDfsDeclInSetByName();
FunctionStatement functionStmt;
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
try
{
getOwner().setDfsDeclInSetByName( nonstaticDfsMap );
getOwner().putDfsDeclsInTable( ((IGosuProgramInternal)getGosuClass()).getSymbolTable() );
getSymbolTable().pushScope( nonstaticScope );
getOwner().pushParsingStaticMember( false );
try
{
functionStmt = getOwner().parseProgramEntryPointBody();
}
finally
{
getSymbolTable().popScope();
getOwner().popParsingStaticMember();
}
DynamicFunctionSymbol dfs = functionStmt == null ? null : functionStmt.getDynamicFunctionSymbol();
if( dfs != null )
{
dfs.setClassMember( true );
if( dfs.getDisplayName().equals( gsClass.getRelativeName() ) )
{
gsClass.getParseInfo().addConstructorFunction(dfs);
}
else
{
gsClass.getParseInfo().addMemberFunction(dfs);
}
}
}
finally
{
getOwner().newDfsDeclInSetByName();
getSymbolTable().popScope();
}
setLocation( iOffset, iLineNum, iColumn, true );
if( getTokenizer().getTokenStart() == iOffset )
{
getLocationsList().remove( getLocationsList().size() - 1 );
}
functionStmt = (FunctionStatement)popStatement();
return functionStmt;
}
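/**
* Declaration pass over a class body: pulls in members of the super types and interfaces, registers
* enum constants, then declaration-parses each field, constructor, function, and property.
*/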
private void parseClassBodyDecl( String strClassName, IGosuClassInternal gsClass )
{
try
{
if( strClassName != null )
{
IType type = TypeLoaderAccess.instance().getIntrinsicTypeByFullName( strClassName );
if( TypeSystem.getOrCreateTypeReference( gsClass ) != type && !(gsClass instanceof IGosuClassFragment) )
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_DUPLICATE_CLASS_FOUND, type.getName() ) );
}
}
}
catch( ClassNotFoundException e )
{
// ignore
}
maybeForceRecursiveTypeToAssignSuperTypes( gsClass );
verify( getClassStatement(), gsClass instanceof IGosuProgram || match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CLASS_DEF );
if( !putClassMembersOfSuperAndInterfaces( gsClass ) )
{
gsClass.setDeclarationsBypassed();
return;
}
if( isInnerClass( gsClass ) && !gsClass.isStatic() )
{
addOuterMember( gsClass );
}
addAutomaticEnumMethodsAndProperties( gsClass );
processEnumConstants( gsClass );
for( Object member = parseFunctionOrConstructorOrFieldDeclaration( gsClass );
member != null;
member = parseFunctionOrConstructorOrFieldDeclaration( gsClass ) )
{
popStatement();
if( member instanceof DynamicFunctionSymbol )
{
processFunctionSymbol( (DynamicFunctionSymbol)member, gsClass );
}
else if( member instanceof DynamicPropertySymbol )
{
processPropertySymbol( (DynamicPropertySymbol)member, gsClass );
}
else
{
processVarStmt( gsClass, (VarStatement)member );
}
}
if( !gsClass.isInterface() )
{
if( !gsClass.ensureDefaultConstructor( getSymbolTable(), getOwner() ) )
{
getClassStatement().addParseException( new ParseException( makeFullParserState(),
Res.MSG_NO_DEFAULT_CTOR_IN,
gsClass.getSupertype().getName() ) );
}
}
boolean b = isInnerClass( gsClass ) || match( null, SourceCodeTokenizer.TT_EOF );
verify( getClassStatement(), b, Res.MSG_END_OF_STMT );
gsClass.addDelegateImpls( getSymbolTable(), this );
if( gsClass instanceof IGosuProgramInternal )
{
((IGosuProgramInternal)gsClass).addProgramEntryPoint( getSymbolTable(), this );
}
if( gsClass instanceof IGosuTemplateInternal )
{
((IGosuTemplateInternal)gsClass).addTemplateEntryPoints( getSymbolTable(), this );
}
gsClass.syncGenericAndParameterizedClasses();
gsClass.setDeclarationsCompiled();
if( parseDeclarationsOfLeftOverInnerClasses( gsClass ) )
{
gsClass.setInnerDeclarationsCompiled();
}
}
private void maybeForceRecursiveTypeToAssignSuperTypes( IGosuClassInternal gsClass )
{
if( gsClass.isParameterizedType() )
{
// If this is a recursive type, force super/interface assignment
gsClass.getSupertype();
gsClass.getInterfaces();
}
}
private boolean putClassMembersOfSuperAndInterfaces( IGosuClassInternal gsClass )
{
if( gsClass.isAnnotation() && JavaTypes.ANNOTATION().isAssignableFrom( gsClass ) )
{
// Don't try to put members of the implicitly extended java.lang.annotation.Annotation
return true;
}
ICompilableTypeInternal enclosingType = gsClass.getEnclosingType();
if( enclosingType instanceof IGosuClassInternal &&
((IGosuClassInternal)enclosingType).isHeaderCompiled() && TypeLord.encloses( enclosingType, getOwner().getGosuClass() ) )
{
enclosingType.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), gsClass.isStatic() );
}
for( IType type : gsClass.getInterfaces() )
{
if( !(type instanceof ErrorType) )
{
if( !putClassMembers( type ) )
{
return false;
}
}
}
return putClassMembers( gsClass.getSuperClass() );
}
private boolean putClassMembers( IType type )
{
IGosuClassInternal gsType = IGosuClassInternal.Util.getGosuClassFrom( type );
if( gsType != null )
{
gsType.compileDeclarationsIfNeeded();
if( !gsType.isDeclarationsCompiled() )
{
advanceToClassBodyEnd();
// Try again after enclosing class finishes
return false;
}
gsType.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), false );
}
return true;
}
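/**
* Repeatedly declaration-compiles any inner classes that are not yet compiled; returns false when a
* pass makes no progress (a cycle), in which case the remaining inner classes are reparsed later.
*/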
private boolean parseDeclarationsOfLeftOverInnerClasses( IGosuClassInternal gsClass )
{
int iCount = 0;
int iPriorCount;
Collection<? extends IGosuClass> innerClasses = gsClass.getKnownInnerClassesWithoutCompiling().values();
do
{
iPriorCount = iCount;
iCount = 0;
for( IGosuClass c : innerClasses )
{
IGosuClassInternal innerClass = (IGosuClassInternal)c;
if( !innerClass.isDeclarationsCompiled() || !innerClass.isInnerDeclarationsCompiled() )
{
if( innerClass.getSourceFileHandle() instanceof InnerClassFileSystemSourceFileHandle )
{
int state = getTokenizer().mark();
parseInnerClassDeclaration( innerClass );
getTokenizer().restoreToMark( state );
}
iCount += (innerClass.isDeclarationsCompiled() && innerClass.isInnerDeclarationsCompiled()) ? 0 : 1;
}
}
if( iPriorCount > 0 && iPriorCount == iCount )
{
// Could not decl parse one or more inner classes, must be a cycle; will reparse later
return false;
}
} while( iCount > 0 );
return true;
}
private void addAutomaticEnumMethodsAndProperties( IGosuClassInternal gsClass )
{
if( gsClass.isEnum() )
{
addEnumProperty( gsClass, new EnumCodePropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumDisplayNamePropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumNamePropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumOrdinalPropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumValuePropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
addEnumProperty( gsClass, new EnumAllValuesPropertySymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() ) );
DynamicFunctionSymbol dfs = new EnumValueOfFunctionSymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() );
gsClass.getParseInfo().addMemberFunction( dfs );
getOwner().putDfsDeclInSetByName( dfs );
dfs = new EnumValuesFunctionSymbol( gsClass, TypeSystem.getCompiledGosuClassSymbolTable() );
gsClass.getParseInfo().addMemberFunction( dfs );
getOwner().putDfsDeclInSetByName( dfs );
}
}
private void addEnumProperty( IGosuClassInternal gsClass, DynamicPropertySymbol dps )
{
gsClass.getParseInfo().addMemberProperty( dps );
getOwner().putDfsDeclInSetByName( dps.getGetterDfs() ); // put in dfs map to prevent overriding by enum impl class
}
private void processEnumConstants( IGosuClassInternal gsClass )
{
boolean bEnum = gsClass != null && gsClass.isEnum();
if( !bEnum )
{
return;
}
Token t = new Token();
int state = getTokenizer().mark();
boolean bAtLeastOneConst = false;
boolean bConst;
do
{
bConst = false;
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( t, null, SourceCodeTokenizer.TT_WORD, true ) &&
!Keyword.isKeyword( t._strValue ) &&
match( t, SourceCodeTokenizer.TT_WORD ) )
{
VarStatement varStmt = parseEnumConstantDecl( t._strValue );
varStmt.setNameOffset( t.getTokenStart(), t._strValue );
setLocation( iOffset, iLineNum, iColumn );
popStatement();
processVarStmt( gsClass, varStmt );
bAtLeastOneConst = bConst = true;
}
if( match( null, ';' ) )
{
break;
}
} while( bConst && match( null, ',' ) );
if( !bAtLeastOneConst )
{
getTokenizer().restoreToMark( state );
}
}
private VarStatement parseEnumConstantDecl( String strIdentifier )
{
VarStatement varStmt = new VarStatement();
ModifierInfo modifiers = new ModifierInfo( Modifier.PUBLIC | Modifier.STATIC | Modifier.FINAL );
varStmt.setModifierInfo( modifiers );
verify( varStmt, getSymbolTable().getSymbol( strIdentifier ) == null, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
if( match( null, null, '(', true ) )
{
eatParenthesized( varStmt, Res.MSG_EXPECTING_RIGHTPAREN_FUNCTION_DEF );
if( match( null, null, '{', true ) )
{
eatStatementBlock( varStmt, Res.MSG_EXPECTING_RIGHTBRACE_STMTBLOCK );
}
}
IType type = getGosuClass();
varStmt.setScope( GlobalScope.EXECUTION );
AbstractDynamicSymbol symbol = new DynamicSymbol( getGosuClass(), getSymbolTable(), strIdentifier, type, null );
modifiers.addAll( symbol.getModifierInfo() );
symbol.setModifierInfo( modifiers );
varStmt.setSymbol( symbol );
varStmt.setEnumConstant( true );
getSymbolTable().putSymbol( symbol );
pushStatement( varStmt );
return varStmt;
}
private void processVarStmt( IGosuClassInternal gsClass, VarStatement varStmt )
{
gsClass.getParseInfo().addMemberField(varStmt);
}
public void processFunctionSymbol( DynamicFunctionSymbol dfs, IGosuClassInternal gsClass )
{
getSymbolTable().putSymbol( dfs );
if( dfs.getDisplayName().equals( gsClass.getRelativeName() ) )
{
gsClass.getParseInfo().addConstructorFunction(dfs);
}
else
{
gsClass.getParseInfo().addMemberFunction(dfs);
}
}
void processPropertySymbol( DynamicPropertySymbol dps, ICompilableTypeInternal gsClass )
{
getSymbolTable().putSymbol( dps );
dps.addMemberSymbols( gsClass );
}
private void addOuterMember( ICompilableTypeInternal gsClass )
{
while( gsClass instanceof IBlockClass )
{
// blocks should never be considered part of the outer hierarchy
gsClass = gsClass.getEnclosingType();
}
DynamicFunctionSymbol dfs = new OuterFunctionSymbol( getSymbolTable(), gsClass );
dfs.setClassMember( true );
DynamicPropertySymbol dps = getOrCreateDynamicPropertySymbol( getClassStatement(), gsClass, dfs, true );
processPropertySymbol( dps, gsClass );
}
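/**
* Declaration pass over an enhancement body: declaration-parses each function and property and
* reports a duplicate-enhancement error when another type already claims this name.
*/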
private void parseEnhancementBodyDecl( IGosuClassInternal gsClass )
{
try
{
IType type = TypeLoaderAccess.instance().getIntrinsicTypeByFullName( gsClass.getName() );
if( gsClass != type )
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_DUPLICATE_ENHANCEMENT_FOUND, type.getName() ) );
}
}
catch( ClassNotFoundException e )
{
// ignore
}
verify( getClassStatement(), match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CLASS_DEF );
for( Object result = parseFunctionDeclForEnhancement( gsClass );
result != null;
result = parseFunctionDeclForEnhancement( gsClass ) )
{
if( !result.equals( Boolean.FALSE ) )
{
popStatement();
if( result instanceof DynamicFunctionSymbol )
{
DynamicFunctionSymbol dfs = (DynamicFunctionSymbol)result;
getSymbolTable().putSymbol( dfs );
gsClass.getParseInfo().addMemberFunction(dfs);
}
else if( result instanceof DynamicPropertySymbol )
{
getSymbolTable().putSymbol( (DynamicPropertySymbol)result );
((DynamicPropertySymbol)result).addMemberSymbols( gsClass );
}
}
}
verify( getClassStatement(), isInnerClass( gsClass ) || match( null, SourceCodeTokenizer.TT_EOF ), Res.MSG_END_OF_STMT );
gsClass.syncGenericAndParameterizedClasses();
gsClass.setDeclarationsCompiled();
gsClass.setInnerDeclarationsCompiled();
}
public List<ParseException> resolveFunctionAndPropertyDecls( ISymbolTable table )
{
for( Object member = parseFunctionOrConstructorOrFieldDeclaration( null );
member != null; member = parseFunctionOrConstructorOrFieldDeclaration( null ) )
{
popStatement();
if( member instanceof DynamicFunctionSymbol )
{
table.putSymbol( (DynamicFunctionSymbol)member );
}
else if( member instanceof DynamicPropertySymbol )
{
table.putSymbol( (DynamicPropertySymbol)member );
}
}
pushStatement( getClassStatement() );
setLocation( _iClassOffset, _iClassLineNum, _iClassColumn );
popStatement();
//noinspection RedundantCast,unchecked
return (List<ParseException>)(List)getClassStatement().getParseExceptions();
}
private Object parseFunctionDeclForEnhancement( IGosuClassInternal gsClass )
{
int[] location = new int[3];
Object rtn = _parseFunctionDeclForEnhancement( gsClass, location );
if( rtn != null && !Boolean.FALSE.equals( rtn ) )
{
setLocation( location[0], location[1], location[2] );
}
return rtn;
}
private Object _parseFunctionDeclForEnhancement( IGosuClassInternal gsClass, int[] location )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
String strMemberKeyword[] = new String[1];
ModifierInfo modifiers = parseUntilMemberKeyword( strMemberKeyword, false, location );
if( modifiers.getModifiers() == -1 )
{
return null;
}
if( strMemberKeyword[0] != null && strMemberKeyword[0].equals( Keyword.KW_function.toString() ) )
{
FunctionStatement fs = new FunctionStatement();
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, false, false, modifiers );
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
verify( fs, !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, Keyword.KW_function );
if( dfs != null )
{
dfs.setClassMember( true );
}
if( verify( getClassStatement(), !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE ) )
{
if( !Modifier.isNative( modifiers.getModifiers() ) )
{
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
}
}
// verifyTypeVarVariance( Variance.COVARIANT, fs, false, dfs.getType() );
return dfs;
}
else if( strMemberKeyword[0] != null && strMemberKeyword[0].equals( Keyword.KW_property.toString() ) )
{
boolean bGetter = match( null, Keyword.KW_get );
verify( getClassStatement(), bGetter || match( null, Keyword.KW_set ), Res.MSG_EXPECTING_PROPERTY_GET_OR_SET_MODIFIER );
FunctionStatement fs = new FunctionStatement();
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, true, bGetter, modifiers );
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
setLocation( iOffset, iLineNum, iColumn );
popStatement();
verify( fs, !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, Keyword.KW_function );
if( dfs != null )
{
dfs.setClassMember( true );
}
if( verify( getClassStatement(), !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE ) )
{
if( !Modifier.isNative( modifiers.getModifiers() ) )
{
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
}
}
DynamicPropertySymbol dps = dfs == null ? null : getOrCreateDynamicPropertySymbol( getClassStatement(), gsClass, dfs, bGetter );
PropertyStatement statement = new PropertyStatement( fs, dps );
verifyPropertiesAreSymmetric( bGetter, dfs, dps, statement );
pushStatement( statement );
// if( bGetter )
// {
// verifyTypeVarVariance( Variance.COVARIANT, fs, false, dps.getGetterDfs().getReturnType() );
// }
// else if( dps.getSetterDfs().getArgTypes().length > 0 )
// {
// verifyTypeVarVariance( Variance.CONTRAVARIANT, fs, false, dps.getSetterDfs().getArgTypes()[0] );
// }
return dps;
}
else if( strMemberKeyword[0] != null && strMemberKeyword[0].equals( Keyword.KW_var.toString() ) )
{
return Boolean.FALSE;
}
return null;
}
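// Parses the body of an enhancement. Enhancements may only add new functions and properties;
// members that would override or conflict with features already on the enhanced type produce
// parse errors, and statements other than functions, properties, no-ops, namespace, and uses
// statements are rejected.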
private void parseClassStatementAsEnhancement( IGosuClassInternal gsClass )
{
//## todo: remove this scope?
IGosuEnhancementInternal enhancement = (IGosuEnhancementInternal)gsClass;
getSymbolTable().pushScope();
try
{
verify( getClassStatement(), match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CLASS_DEF );
parseClassMembers( gsClass );
for( Statement stmt = peekStatement(); stmt != null; stmt = peekStatement() )
{
stmt = popStatement();
IType enhancedType = enhancement.getEnhancedType();
if( stmt instanceof FunctionStatement )
{
FunctionStatement func = (FunctionStatement)stmt;
if( func.getDynamicFunctionSymbol() != null && !(enhancedType instanceof ErrorType) )
{
ITypeInfo typeInfo = enhancedType.getTypeInfo();
if( typeInfo != null )
{
IMethodInfo mi = typeInfo instanceof IRelativeTypeInfo
? ((IRelativeTypeInfo)typeInfo).getMethod( enhancement, func.getFunctionName(), func.getDynamicFunctionSymbol().getArgTypes() )
: typeInfo.getMethod( func.getFunctionName(), func.getDynamicFunctionSymbol().getArgTypes() );
if( overridesMethodWithDefaultParams(func, typeInfo) )
{
addDeclaredNameParseError( func, Res.MSG_OVERLOADING_NOT_ALLOWED_WITH_OPTIONAL_PARAMS, mi.getDisplayName(), enhancedType.getRelativeName() );
}
else if( (mi != null) && (!featureIsOwnedByEnhancement( enhancement, mi ) || (enhancedType != JavaTypes.OBJECT() && GosuClass.isObjectMethod( mi ))) )
{
addDeclaredNameParseError( func, Res.MSG_CANNOT_OVERRIDE_FUNCTIONS_IN_ENHANCEMENTS, mi.getDisplayName(), enhancedType.getRelativeName() );
}
else if( enhancedType instanceof IGosuClass )
{
String name = func.getFunctionName();
DynamicFunctionSymbol dfs = func.getDynamicFunctionSymbol();
if( name.startsWith( "set" ) && dfs.getArgs().size() == 1 )
{
ITypeInfo ti = enhancedType.getTypeInfo();
IPropertyInfo pi = ((IRelativeTypeInfo)ti).getProperty( enhancement, name.substring( 3, name.length() ) );
if( pi instanceof GosuPropertyInfo )
{
ReducedDynamicPropertySymbol dps = ((GosuPropertyInfo)pi).getDps();
if( dps.getSetterDfs() != null )
{
IType argType = dfs.getArgs().get( 0 ).getType();
if( argType.equals( dps.getType() ) )
{
addDeclaredNameParseError( func, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, dfs.getName(), dps.getName() );
}
else if( getOwner().doTypesReifyToTheSameBytecodeType( argType, dps.getType() ) )
{
addDeclaredNameParseError( func, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT_UPON_REIFICATION, dfs.getName(), dps.getName() );
}
}
}
}
else if( (name.startsWith( "get" ) || name.startsWith( "is" )) && dfs.getArgs().size() == 0 )
{
ITypeInfo ti = enhancedType.getTypeInfo();
IPropertyInfo pi = ((IRelativeTypeInfo)ti).getProperty( enhancement, name.substring( name.startsWith( "get" ) ? 3 : 2, name.length() ) );
if( pi instanceof GosuPropertyInfo )
{
ReducedDynamicPropertySymbol dps = ((GosuPropertyInfo)pi).getDps();
if( dps.getGetterDfs() != null )
{
addDeclaredNameParseError( func, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, dfs.getName(), dps.getName() );
}
}
}
}
}
}
}
else if( stmt instanceof PropertyStatement )
{
PropertyStatement prop = (PropertyStatement)stmt;
ITypeInfo typeInfo = enhancedType.getTypeInfo();
if( typeInfo != null && !(enhancedType instanceof ErrorType) )
{
IPropertyInfo pi = typeInfo instanceof IRelativeTypeInfo
? ((IRelativeTypeInfo)typeInfo).getProperty( enhancement, prop.getFunctionName() )
: typeInfo.getProperty( prop.getFunctionName() );
if( pi != null && !featureIsOwnedByEnhancement( enhancement, pi ) )
{
addDeclaredNameParseError( prop, Res.MSG_CANNOT_OVERRIDE_PROPERTIES_IN_ENHANCEMENTS, pi.getDisplayName(), enhancedType.getRelativeName() );
}
else
{
FunctionStatement funcStmt = prop.getPropertyGetterOrSetter();
DynamicFunctionSymbol dfs = funcStmt.getDynamicFunctionSymbol();
String name = dfs.getDisplayName().substring( 1 );
if( dfs.getArgs().size() == 0 )
{
ITypeInfo ti = enhancedType.getTypeInfo();
IMethodInfo mi = ((IRelativeTypeInfo)ti).getMethod( enhancement, "get" + name );
mi = mi == null ? ((IRelativeTypeInfo)ti).getMethod( enhancement, "is" + name ) : mi;
if( mi != null )
{
addDeclaredNameParseError( prop, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, mi.getName(), name );
}
}
else if( funcStmt.getParameters().size() > 0 )
{
ITypeInfo ti = enhancedType.getTypeInfo();
for( IMethodInfo mi: ((IRelativeTypeInfo)ti).getMethods( enhancement ) )
{
if( mi.getDisplayName().equals( "set" + name ) && mi.getParameters().length == 1 )
{
IType argType = mi.getParameters()[0].getFeatureType();
if( argType.equals( dfs.getArgTypes()[0] ) )
{
addDeclaredNameParseError( prop, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, mi.getName(), dfs.getName() );
}
else if( getOwner().doTypesReifyToTheSameBytecodeType( argType, dfs.getArgTypes()[0] ) )
{
addDeclaredNameParseError( prop, Res.MSG_PROPERTY_AND_FUNCTION_CONFLICT, mi.getName(), dfs.getName() );
}
}
}
}
}
}
}
else if( !(stmt instanceof NoOpStatement ||
stmt instanceof NamespaceStatement ||
stmt instanceof UsesStatement) )
{
ParseException parseException = new ParseException( stmt.getLineNum(), 1, stmt.getLocation().getColumn(), stmt.getLocation().getOffset(), stmt.getLocation().getExtent(),
getSymbolTable(), Res.MSG_ENHANCEMENT_DOES_NOT_ACCEPT_THIS_STATEMENT );
stmt.addParseException( parseException );
}
}
verify( getClassStatement(), match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_CLASS_DEF );
}
finally
{
getSymbolTable().popScope();
}
}
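// Attaches a parse exception to the declaration, positioned on the declared name itself
// (the variable or function name) rather than on the whole statement.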
void addDeclaredNameParseError( IParsedElementWithAtLeastOneDeclaration stmt, ResourceKey key, Object... args )
{
int nameOffset = stmt.getNameOffset( null );
ParseException parseException = new ParseException( stmt.getLineNum(), 1, stmt.getLocation().getColumn(), nameOffset, nameOffset + ((stmt instanceof VarStatement) ? ((VarStatement)stmt).getIdentifierName().length() : stmt.getFunctionName().length()),
getSymbolTable(), key, args );
stmt.addParseException( parseException );
}
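// Returns true if the function declared in the enhancement matches (by display name) a method
// already on the enhanced type where either side declares optional/default parameters; the
// caller reports this as an illegal overload with optional params.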
private boolean overridesMethodWithDefaultParams(FunctionStatement func, ITypeInfo typeInfo) {
if( !(typeInfo instanceof IRelativeTypeInfo) )
{
return false;
}
IRelativeTypeInfo rti = (IRelativeTypeInfo) typeInfo;
for( IMethodInfo mi : rti.getMethods( func.getGosuClass() ) )
{
if( mi.getDisplayName().equals( func.getFunctionName() ) && mi instanceof GosuMethodInfo && !featureIsOwnedByEnhancement( func.getGosuClass(), mi ) )
{
final ReducedDynamicFunctionSymbol dfs0 = ((GosuMethodInfo) mi).getDfs();
final DynamicFunctionSymbol dfs1 = func.getDynamicFunctionSymbol();
return dfs0 != null && dfs1 != null && (((IInvocableType) dfs0.getType()).hasOptionalParams() || dfs1.hasOptionalParameters());
}
}
return false;
}
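// Determines whether the given feature (method or property) is declared on this enhancement
// itself. Owner and enhancement types are compared by their generic (unparameterized) types;
// two enhancement types are compared by name.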
private boolean featureIsOwnedByEnhancement( IGosuClass enhancement, IFeatureInfo iMethodInfo )
{
if( !(enhancement instanceof IGosuEnhancementInternal) )
{
return false;
}
IType ownerType = iMethodInfo.getOwnersType();
if( ownerType != null && ownerType.isParameterizedType() )
{
ownerType = ownerType.getGenericType();
}
IType enhancementType = enhancement;
if( enhancementType != null && enhancementType.isParameterizedType() )
{
enhancementType = enhancementType.getGenericType();
}
if( enhancementType instanceof IGosuEnhancementInternal &&
ownerType instanceof IGosuEnhancementInternal )
{
return GosuObjectUtil.equals( enhancementType.getName(), ownerType.getName() );
}
else
{
return GosuObjectUtil.equals( enhancementType, ownerType );
}
}
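// Parses just the class header: package/namespace, uses statements, modifiers, the class-type
// keyword (class/interface/structure/annotation/enum/enhancement), the class name, type
// variables, and extends/implements clauses. Nested inner classes are loaded (not parsed) and
// their headers compiled in the finally block. Returns the parsed class name, or null on
// failure or EOF.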
String parseHeader( IGosuClassInternal gsClass, boolean bParseEnhancementOnly, boolean bIsAnonymous, boolean bResolveUsesTypes )
{
boolean bPushedScope = pushScopeIfNeeded( gsClass );
if( gsClass.isHeaderCompiled() )
{
((CompilationState)gsClass.getCompilationState()).setReparsingHeader( true );
}
else
{
gsClass.setCompilingHeader( true );
}
getTokenizer().pushOffsetMarker( this );
gsClass.createNewParseInfo();
setClassStatement( gsClass.getParseInfo().getClassStatement() );
ScriptPartId scriptPartId = new ScriptPartId( gsClass, null );
getOwner().pushScriptPart( scriptPartId );
GosuClassCompilingStack.pushCompilingType( gsClass );
try
{
setTokenizerToClassStart();
if( match( null, SourceCodeTokenizer.TT_EOF ) )
{
if( gsClass instanceof IGosuProgram )
{
// Let empty *program* source parse
//## todo: cache and reuse empty program class
gsClass.setSuperType( JavaTypes.OBJECT() );
}
else if( getClassStatement() != null && getClassStatement().getClassFileStatement() != null )
{
getClassStatement().getClassFileStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_NO_SOURCE_FOUND ) );
}
return null;
}
getOwner().checkInstruction( true );
if( gsClass instanceof IGosuProgram )
{
getOwner().parseProgramClasspathStatements();
getOwner().parseProgramTypeLoaderStatements();
}
getOwner().checkInstruction( true );
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( null, Keyword.KW_package ) )
{
getOwner().parseNamespaceStatement();
setLocation( iOffset, iLineNum, iColumn );
popStatement();
}
else if( gsClass instanceof IGosuProgram )
{
ISourceFileHandle sfh = gsClass.getSourceFileHandle();
boolean bEval = sfh instanceof StringSourceFileHandle;
if( bEval )
{
ITypeUsesMap typeUsesMap = ((StringSourceFileHandle)sfh).getTypeUsesMap();
if( typeUsesMap != null )
{
getOwner().setTypeUsesMap( typeUsesMap );
}
}
if( gsClass.isAnonymous() )
{
// Anonymous implies Eval program...
gsClass.setEnclosingType( TypeSystem.getByFullNameIfValid( sfh.getParentType() ) );
IType enclosingType = gsClass.getEnclosingTypeReference();
getOwner().setNamespace(enclosingType.getNamespace());
Map<String, ITypeVariableDefinition> capturedTypeVars = bEval ? ((StringSourceFileHandle)sfh).getCapturedTypeVars() : null;
if( capturedTypeVars != null )
{
getOwner().getTypeVariables().putAll( capturedTypeVars );
}
}
else
{
String strNamespace = getGosuClass().getNamespace();
getOwner().setNamespace( strNamespace != null && !strNamespace.isEmpty() ? strNamespace : IGosuProgram.PACKAGE );
}
}
else if( !isInnerClass( gsClass ) )
{
getOwner().setNamespace( "" );
}
getOwner().checkInstruction( true );
getOwner().parseUsesStatementList( bResolveUsesTypes );
if( gsClass.getEnclosingType() == null )
{
// Inner classes start parsing right at the class-stmt, so they must
// get at the uses map from the top-level enclosing class
gsClass.setTypeUsesMap(getOwner().getTypeUsesMap());
}
ClassType classType;
if( gsClass.isAnonymous() && !(gsClass instanceof IGosuProgram) )
{
try
{
classType = parseAnonymousClassHeader( gsClass );
}
catch( InnerClassNotFoundException e )
{
classType = ClassType.Class;
}
_iClassOffset = getTokenizer().getTokenStart();
_iClassLineNum = getTokenizer().getLineNumber();
_iClassColumn = getTokenizer().getTokenColumn();
}
else if( gsClass instanceof IGosuProgram )
{
gsClass.setModifierInfo(new ModifierInfo(Modifier.PUBLIC | Modifier.FINAL));
if( gsClass.isAnonymous() ) // generated 'eval' program
{
final IParsedElement enclosingEvalExpression = ((IGosuProgram) gsClass).getEnclosingEvalExpression();
IParseTree parseTree = enclosingEvalExpression == null ? null : enclosingEvalExpression.getLocation();
IFunctionStatement fs = (parseTree == null ? null : parseTree.getEnclosingFunctionStatement());
if( (fs != null && fs.getDynamicFunctionSymbol().isStatic()) ||
// Note a null enclosingEvalExpression implies this anon program is a bare expression that is artificially
// executed as though it were defined somewhere within the enclosing class e.g., an old-style Gosu annotation,
// therefore the expression needs private access to the outer class and must be compiled as a static,
// yet anonymous, inner class
enclosingEvalExpression == null )
{
((ModifierInfo)gsClass.getModifierInfo()).addModifiers( Modifier.STATIC );
}
}
// Optional 'extends' clause for specifying Super Class for a program
parseProgramExtendsStatement( gsClass, bResolveUsesTypes );
classType = ClassType.Class;
}
else
{
getOwner().checkInstruction( true );
_iClassOffset = getTokenizer().getTokenStart();
_iClassLineNum = getTokenizer().getLineNumber();
_iClassColumn = getTokenizer().getTokenColumn();
if( !bIsAnonymous )
{
classType = parseClassType( gsClass, true );
if( classType == ClassType.Interface || classType == ClassType.Structure || classType == ClassType.Annotation )
{
((ModifierInfo)gsClass.getModifierInfo()).addModifiers( Modifier.ABSTRACT );
}
else if( classType == ClassType.Enum )
{
((ModifierInfo)gsClass.getModifierInfo()).addModifiers( Modifier.FINAL );
}
if( classType == ClassType.Annotation )
{
gsClass.addInterface( JavaTypes.ANNOTATION() );
}
}
else
{
classType = parseClassTypeForHeader( gsClass );
}
}
if( classType == null )
{
if( bParseEnhancementOnly )
{
return null;
}
verify( getClassStatement(), false, Res.MSG_EXPECTING_NAME_CLASS_DEF );
}
if( classType == ClassType.Enhancement )
{
if( gsClass instanceof IGosuEnhancementInternal )
{
IGosuEnhancementInternal scriptEnhancement = (IGosuEnhancementInternal)gsClass;
scriptEnhancement.setFoundCorrectHeader();
return parseEnhancementHeaderSuffix( scriptEnhancement );
}
else
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_MUST_BE_DEFINED_AS_CLASS ) );
return null;
}
}
else if( classType != null && !bParseEnhancementOnly )
{
if( classType == ClassType.Enum )
{
gsClass.setEnum();
}
return parseClassOrInterfaceHeaderSuffix( gsClass, classType, bResolveUsesTypes );
}
else
{
return null;
}
}
finally
{
boolean bHeaderCompiled;
try
{
bHeaderCompiled = gsClass.isHeaderCompiled();
if( !bHeaderCompiled )
{
parseInnerClassHeaders( gsClass, bResolveUsesTypes );
}
}
finally
{
GosuClassCompilingStack.popCompilingType();
}
getOwner().popScriptPart( scriptPartId );
((CompilationState)gsClass.getCompilationState()).setReparsingHeader( false );
gsClass.setCompilingHeader( false );
gsClass.setHeaderCompiled();
popScopeIfNeeded( bPushedScope, gsClass );
getTokenizer().popOffsetMarker( this );
if( !bHeaderCompiled )
{
removeTypeVarsFromParserMap( gsClass );
}
}
}
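// Removes this class's generic type variable definitions from the owning parser's
// type-variable map once header parsing is finished.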
private void removeTypeVarsFromParserMap( IGosuClassInternal gsClass )
{
for( IGenericTypeVariable gtv : gsClass.getGenericTypeVariables() )
{
ITypeVariableDefinition typeVarDef = gtv.getTypeVariableDefinition();
Map<String, ITypeVariableDefinition> typeVarMap = getOwner().getTypeVariables();
if( typeVarMap.containsValue( typeVarDef ) )
{
typeVarMap.remove( typeVarDef.getName() );
}
}
}
private boolean pushScopeIfNeeded( final IGosuClassInternal gsClass )
{
ISymbolTable compilingClass = CompiledGosuClassSymbolTable.instance().getSymbolTableForCompilingClass( gsClass );
if( compilingClass != null )
{
return false;
}
// *barf*
if( gsClass.getParser() != null )
{
CompiledGosuClassSymbolTable.instance().pushCompileTimeSymbolTable( gsClass, gsClass.getParser().getSymbolTable() );
}
else
{
CompiledGosuClassSymbolTable.instance().pushCompileTimeSymbolTable( gsClass );
}
getSymbolTable().pushIsolatedScope(new GosuClassTransparentActivationContext(gsClass, false));
return true;
}
private void popScopeIfNeeded( boolean bPop, IGosuClass gsClass )
{
if( bPop )
{
getSymbolTable().popScope();
CompiledGosuClassSymbolTable.instance().popCompileTimeSymbolTable( gsClass );
}
}
private void setTokenizerToClassStart()
{
if( isInnerClass( getGosuClass() ) )
{
getTokenizer().reset();
}
if( !getTokenizer().isPositioned() )
{
getTokenizer().nextToken();
}
}
private ClassType parseAnonymousClassHeader( IGosuClassInternal gsClass )
{
ClassType classType = ClassType.Class;
ParsedElement elem;
if( match( null, null, '(', true ) )
{
elem = getClassStatement();
}
else if( !getOwner().parseTypeLiteral() )
{
throw new InnerClassNotFoundException();
}
else
{
elem = popExpression();
}
eatParenthesized( elem, Res.MSG_EXPECTING_FUNCTION_CLOSE );
//getLocationsList().remove( superTypeLiteral.getLocation() ); // rely on the new-expr to keep the type literal *it* parses
return classType;
}
private boolean goToPosition( int iOffset )
{
try
{
getTokenizer().goToPosition( iOffset );
return true;
}
catch( IOException e )
{
//noinspection ThrowableResultOfMethodCallIgnored
getClassStatement().addParseException( ParseException.wrap( e, makeFullParserState() ) );
}
return false;
}
private ClassType parseClassTypeForHeader( IGosuClassInternal gsClass )
{
while( true )
{
if( match( null, SourceCodeTokenizer.TT_EOF ) )
{
return null;
}
ClassType classType = parseClassType( gsClass, !gsClass.isDeclarationsCompiled() );
if( classType != null )
{
return classType;
}
getTokenizer().nextToken();
}
}
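// Parses the class-level modifiers followed by the class-type keyword (enhancement, interface,
// structure, annotation, class, or enum), verifying which modifiers are legal for each kind
// and, when bSetModifiers is true, recording the resulting ModifierInfo on the class.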
private ClassType parseClassType( IGosuClassInternal gsClass, boolean bSetModifiers )
{
ModifierInfo modifiers = parseModifiersForClass( gsClass, bSetModifiers );
if( !Modifier.isInternal( modifiers.getModifiers() )
&& !Modifier.isProtected( modifiers.getModifiers() )
&& !Modifier.isPrivate( modifiers.getModifiers() ) )
{
modifiers.addModifiers( Modifier.PUBLIC );
}
ClassType classType = null;
if( match( null, Keyword.KW_enhancement ) )
{
classType = ClassType.Enhancement;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isPrivate( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_private, classType.name() );
verify( getClassStatement(), !Modifier.isProtected( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_protected, classType.name() );
verify( getClassStatement(), !Modifier.isInternal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_internal, classType.name() );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_abstract, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
verifyNoAbstractHideOverrideStaticModifierDefined( getClassStatement(), false, modifiers.getModifiers(), Keyword.KW_enhancement );
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_interface ) )
{
classType = ClassType.Interface;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isHide( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_hide, classType.name() );
verify( getClassStatement(), !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, classType.name() );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
if( gsClass.getEnclosingType() != null )
{
modifiers.addModifiers( Modifier.STATIC );
}
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_structure ) )
{
classType = ClassType.Structure;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isHide( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_hide, classType.name() );
verify( getClassStatement(), !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, classType.name() );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
if( gsClass.getEnclosingType() != null )
{
modifiers.addModifiers( Modifier.STATIC );
}
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_annotation ) )
{
classType = ClassType.Annotation;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isHide( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_hide, classType.name() );
verify( getClassStatement(), !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, classType.name() );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
modifiers.addModifiers( Modifier.ANNOTATION );
if( gsClass.getEnclosingType() != null )
{
modifiers.addModifiers( Modifier.STATIC );
}
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_class ) )
{
classType = ClassType.Class;
if( bSetModifiers )
{
verify( getClassStatement(), !Modifier.isHide( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_hide, classType.name() );
verify( getClassStatement(), !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
gsClass.setModifierInfo(modifiers);
}
}
else if( match( null, Keyword.KW_enum ) )
{
classType = ClassType.Enum;
if( bSetModifiers )
{
verifyNoAbstractHideOverrideModifierDefined( getClassStatement(), false, modifiers.getModifiers(), Keyword.KW_final );
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, classType.name() );
verify( getClassStatement(), !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, classType.name() );
gsClass.setModifierInfo(modifiers);
}
}
if( gsClass.shouldFullyCompileAnnotations() )
{
verifyModifiers( getClassStatement(), modifiers, UsageTarget.TypeTarget );
}
gsClass.setFullDescription( modifiers.getDescription() );
if( bSetModifiers && classType != null && gsClass.getEnclosingType() == null )
{
verify( getClassStatement(), !Modifier.isPrivate( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_private, classType.name() );
verify( getClassStatement(), !Modifier.isProtected( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_protected, classType.name() );
verify( getClassStatement(), !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_static, classType.name() );
}
return classType;
}
private ModifierInfo parseModifiersForClass( IGosuClassInternal gsClass, boolean bSetModifiers )
{
ModifierInfo modifiers;
ICompilableTypeInternal enclosingType = gsClass.getEnclosingType();
if( enclosingType instanceof IGosuClassInternal && ((IGosuClassInternal)enclosingType).isDeclarationsCompiled() )
{
// push static class symbols for annotations (they are part of modifier parsing)
ClassScopeCache scopeCache = makeClassScopeCache( (IGosuClassInternal)enclosingType );
pushClassSymbols( true, scopeCache );
try
{
modifiers = parseModifiers( !bSetModifiers );
}
finally
{
popClassSymbols();
}
}
else
{
modifiers = parseModifiers( !bSetModifiers );
}
return modifiers;
}
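// Parses the remainder of a class/interface/structure/annotation/enum header after the
// class-type keyword: the class name, type variable definitions, the optional extends clause,
// and the extends/implements interface list. Also adds the IGosuObject interface where
// appropriate and returns the fully qualified class name.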
private String parseClassOrInterfaceHeaderSuffix( IGosuClassInternal gsClass, ClassType classType, boolean bResolveTypes )
{
String strClassName;
IGosuClassInternal gosuObjectInterface = getGosuObjectInterface();
if (gosuObjectInterface == null) {
return gsClass.getName();
}
if( gsClass instanceof IGosuProgram )
{
gsClass.addInterface(gosuObjectInterface);
if( !gsClass.isAnonymous() )
{
IType type = parseEnhancedOrImplementedType( gsClass, false, Collections.<IType>emptyList() );
gsClass.setSuperType( type );
}
strClassName = gsClass.getName();
}
else if( gsClass.isAnonymous() )
{
gsClass.addInterface(gosuObjectInterface);
strClassName = gsClass.getName();
if( gsClass.isHeaderCompiled() )
{
ClassDeclaration classDeclaration = new ClassDeclaration( strClassName );
pushExpression( classDeclaration );
SourceCodeTokenizer tokenizer = getOwner().getTokenizer();
setLocation( tokenizer.getTokenStart(), tokenizer.getLineNumber(), tokenizer.getTokenColumn(), true, true );
popExpression();
getClassStatement().setClassDeclaration( classDeclaration );
// makeSyntheticClassDeclaration( strClassName, false );
}
}
else
{
boolean bStructure = classType.equals( ClassType.Structure );
boolean bAnnotation = classType.equals( ClassType.Annotation );
boolean bInterface = bStructure || bAnnotation || classType.equals( ClassType.Interface );
gsClass.setInterface( bInterface );
gsClass.setStructure( bStructure );
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
Token t = new Token();
verify( getClassStatement(), match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF );
strClassName = t._strValue;
String strNamespace;
if( isTopLevelClass( getGosuClass() ) )
{
strNamespace = getOwner().getNamespace();
}
else
{
strNamespace = getGosuClass().getEnclosingType().getName();
}
strClassName = GosuStringUtil.isEmpty(strNamespace)
? strClassName
: strNamespace + '.' + strClassName;
if( gsClass.getEnclosingTypeReference() == null && strClassName != null && !strClassName.equals( gsClass.getName() ) )
{
verify( getClassStatement(), false, Res.MSG_WRONG_CLASSNAME, strClassName, gsClass.getName() );
}
if( strClassName != null && gsClass.isHeaderCompiled() )
{
ClassDeclaration classDeclaration = new ClassDeclaration( strClassName );
pushExpression( classDeclaration );
setLocation( iOffset, iLineNum, iColumn );
popExpression();
getClassStatement().setClassDeclaration( classDeclaration );
}
List<ITypeVariableDefinitionExpression> typeVarLiteralList = getOwner().parseTypeVariableDefs( getClassStatement(), false, getDeclTypeVars() );
gsClass.setGenericTypeVariables((List)typeVarLiteralList);
if( gsClass.isEnum() )
{
verify( getClassStatement(), typeVarLiteralList.isEmpty(), Res.MSG_ENUM_MAY_NOT_HAVE_TYPEPARAM );
}
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getTokenColumn();
if( !bInterface && (match( null, Keyword.KW_extends ) || gsClass.isEnum()) )
{
IType superType = parseEnhancedOrImplementedType( gsClass, true, Collections.<IType>emptyList() );
if( superType instanceof IGosuClassInternal )
{
if( bResolveTypes )
{
((IGosuClassInternal)superType).compileDeclarationsIfNeeded();
}
}
gsClass.setSuperType( superType );
if( gsClass.getCompilationState().isCompilingDeclarations() &&
gsClass.isGenericType() )
{
verify( getClassStatement(), !JavaTypes.THROWABLE().isAssignableFrom( superType ) , Res.MSG_INVALID_GENERIC_EXCEPTION );
}
SuperTypeClause extendsClause = new SuperTypeClause( superType );
pushExpression( extendsClause );
if( gsClass.isDeclarationsCompiled() )
{
verifySuperTypeVarVariance( getClassStatement(), superType );
}
setLocation( iOffset, iLineNum, iColumn );
popExpression();
iOffset = getTokenizer().getTokenStart();
iLineNum = getTokenizer().getLineNumber();
iColumn = getTokenizer().getTokenColumn();
}
boolean hasImplements = false;
if( (bInterface && match( null, Keyword.KW_extends )) ||
(hasImplements = match( null, Keyword.KW_implements )) )
{
if( verify( getClassStatement(), !bInterface || !hasImplements, Res.MSG_NO_IMPLEMENTS_ALLOWED ) )
{
verify( getClassStatement(), !bAnnotation, Res.MSG_NO_EXTENDS_ALLOWED );
}
List<IType> interfaces = new ArrayList<IType>();
do
{
IType type = parseEnhancedOrImplementedType( gsClass, bInterface, interfaces );
gsClass.addInterface( type );
if( gsClass.isDeclarationsCompiled() )
{
verifySuperTypeVarVariance( getClassStatement(), type );
}
interfaces.add( type );
} while( match( null, ',' ) );
InterfacesClause interfacesClause = new InterfacesClause( gsClass, interfaces.toArray( new IType[interfaces.size()] ) );
pushExpression( interfacesClause );
setLocation( iOffset, iLineNum, iColumn );
popExpression();
}
if( classType == ClassType.Class || classType == ClassType.Interface || classType == ClassType.Structure )
{
IGosuClassInternal gsObjectInterface = gosuObjectInterface;
if( (!gsClass.isInterface() || !interfaceExtendsGosuObject( gsClass, gsObjectInterface )) && !gsClass.getName().startsWith( IGosuClass.PROXY_PREFIX ) )
{
gsClass.addInterface( gsObjectInterface );
}
}
else if( classType == ClassType.Enum )
{
gsClass.addInterface(gosuObjectInterface);
}
}
if( (isTopLevelClass( gsClass ) ||
gsClass instanceof IGosuProgram ||
// Anonymous classes can have inner classes
gsClass.isAnonymous()) &&
!gsClass.isHeaderCompiled() )
{
// Recursively *load* (no parsing) all nested inner types from the top-level class file
int state = getTokenizer().mark();
loadAllNestedInnerClasses( gsClass );
getTokenizer().restoreToMark( state );
}
return strClassName;
}
private boolean interfaceExtendsGosuObject( IGosuClassInternal gsClass, IGosuClassInternal gsObjectInterface )
{
if( gsClass == gsObjectInterface )
{
return true;
}
for( IType iface: gsClass.getInterfaces() )
{
if( iface instanceof IGosuClass )
{
return true;
}
}
return false;
}
private List<TypeVariableDefinitionImpl> getDeclTypeVars()
{
IGosuClass gsClass = getGosuClass();
if( !gsClass.isDeclarationsCompiled() )
{
return Collections.emptyList();
}
IGenericTypeVariable[] typeVars = gsClass.getGenericTypeVariables();
if( typeVars == null )
{
return Collections.emptyList();
}
List<TypeVariableDefinitionImpl> result = new ArrayList<TypeVariableDefinitionImpl>( typeVars.length );
for( IGenericTypeVariable typeVar : typeVars )
{
result.add( (TypeVariableDefinitionImpl)typeVar.getTypeVariableDefinition() );
}
return result;
}
private void makeSyntheticClassDeclaration( String strClassName, boolean bProgram )
{
ClassDeclaration classDeclaration = new ClassDeclaration( strClassName );
pushExpression( classDeclaration );
SourceCodeTokenizer tokenizer = getOwner().getTokenizer();
setLocation( bProgram ? 0 : tokenizer.getTokenStart(), tokenizer.getLineNumber(), bProgram ? 0 : tokenizer.getTokenColumn(), true, true );
popExpression();
getClassStatement().setClassDeclaration( classDeclaration );
}
private void parseInnerClassHeaders( IGosuClassInternal gsClass, boolean bResolveTypes )
{
Map<CharSequence, ? extends IGosuClass> innerClassesByName = gsClass.getKnownInnerClassesWithoutCompiling();
if( innerClassesByName.isEmpty() )
{
return;
}
int state = getTokenizer().mark();
int iLocationsSize = getLocationsList().size();
try
{
for( CharSequence name : innerClassesByName.keySet() )
{
IGosuClassInternal innerClass = (IGosuClassInternal)innerClassesByName.get( name );
if( !(innerClass instanceof IBlockClass) )
{
innerClass.createNewParseInfo();
new GosuClassParser( getOwner(), innerClass ).parseHeader( innerClass, false, false, bResolveTypes );
}
}
}
finally
{
while( getLocationsList().size() > iLocationsSize )
{
getLocationsList().remove( getLocationsList().size()-1 );
}
getTokenizer().restoreToMark( state );
}
}
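// Scans the class body token stream and registers every nested inner class, recording source
// offsets and tokenizer marks on their source file handles, without parsing their bodies.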
private void loadAllNestedInnerClasses( IGosuClassInternal gsClass )
{
String[] strMemberKeyword = new String[1];
if( !(gsClass instanceof IGosuProgram) )
{
advanceToClassBodyStart();
}
ModifierInfo modifiers;
while( true )
{
int[] location = new int[3];
int[] mark = new int[]{-1};
modifiers = parseUntilMemberKeyword( strMemberKeyword, true, -1, location, mark );
if( modifiers.getModifiers() == -1 )
{
if( getTokenizer().isEOF() )
{
break;
}
if( !isInnerClassesEmpty() ) // can be empty e.g., errors with unmatched braces
{
IGosuClassInternal innerClass = popInnerClass( getCurrentInnerClass() );
innerClass.getSourceFileHandle().setEnd( location[0] );
}
else if( gsClass.isAnonymous() )
{
break;
}
}
else
{
ClassType classType = getClassType( strMemberKeyword[0] );
if( classType != null )
{
IGosuClassInternal innerClass = loadNextInnerClass( gsClass, classType );
if( innerClass == null )
{
break;
}
innerClass.getSourceFileHandle().setOffset( location[0] );
((InnerClassFileSystemSourceFileHandle)innerClass.getSourceFileHandle()).setMark( mark[0] );
pushInnerClass( innerClass );
}
}
}
}
private ClassType getClassType( String strValue )
{
return
Keyword.KW_class.toString().equals( strValue )
? ClassType.Class
: Keyword.KW_interface.equals( strValue )
? ClassType.Interface
: Keyword.KW_annotation.equals( strValue )
? ClassType.Annotation
: Keyword.KW_structure.equals( strValue )
? ClassType.Structure
: Keyword.KW_enum.toString().equals( strValue )
? ClassType.Enum
: null;
}
private void advanceToClassBodyStart()
{
while( true )
{
if( match( null, '{' ) )
{
break;
}
if( match( null, SourceCodeTokenizer.TT_EOF ) )
{
break;
}
getTokenizer().nextToken();
}
}
private void advanceToClassBodyEnd()
{
int iEnd = getGosuClass().getSourceFileHandle().getEnd();
if( iEnd <= 0 )
{
//assert isTopLevelClass( getGosuClass() ) || isEvalClass() : "Inner class does not have an 'end' marker";
return;
}
try
{
//## perf: this is very slow, maybe use a tokenizer mark instead
getTokenizer().goToPosition( iEnd );
}
catch( IOException e )
{
throw new RuntimeException( e );
}
verify( getClassStatement(), match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_CLASS_DEF );
}
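// Creates and registers the inner class whose declaration starts at the current token.
// A duplicate inner class name gets a parse exception and a synthesized "_duplicate_<n>" name.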
private IGosuClassInternal loadNextInnerClass( IGosuClassInternal gsClass, ClassType classType )
{
Token T = new Token();
IGosuClassInternal enclosingGsClass = getGosuClass();
if( verify( getClassStatement(), match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF ) )
{
enclosingGsClass = getCurrentInnerClass() != null ? getCurrentInnerClass() : enclosingGsClass;
String strInnerClass = T._strValue;
IGosuClassInternal innerGsClass;
innerGsClass = (IGosuClassInternal)enclosingGsClass.getKnownInnerClassesWithoutCompiling().get( strInnerClass );
if( innerGsClass != null )
{
// Duplicate inner class name
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_DUPLICATE_CLASS_FOUND, strInnerClass ) );
strInnerClass = strInnerClass + "_duplicate_" + nextIndexOfErrantDuplicateInnerClass( enclosingGsClass, innerGsClass );
}
innerGsClass = (IGosuClassInternal)gsClass.getTypeLoader().makeNewClass(
new InnerClassFileSystemSourceFileHandle( classType, enclosingGsClass.getName(), strInnerClass, gsClass.isTestClass() ) );
innerGsClass.setEnclosingType( enclosingGsClass );
innerGsClass.setNamespace( enclosingGsClass.getNamespace() );
enclosingGsClass.addInnerClass( innerGsClass );
advanceToClassBodyStart();
return innerGsClass;
}
return null;
}
public int nextIndexOfErrantDuplicateInnerClass( IGosuClassInternal enclosingGsClass, IGosuClassInternal innerClass )
{
int iMax = -1;
String strName = innerClass.getRelativeName() + "_duplicate_";
while( true )
{
IType existingInnerClass = enclosingGsClass.getKnownInnerClassesWithoutCompiling().get( strName + ++iMax );
if( existingInnerClass == null )
{
return iMax;
}
}
}
private IGosuClassInternal getGosuObjectInterface()
{
return IGosuClassInternal.Util.getGosuClassFrom( JavaTypes.IGOSU_OBJECT() );
}
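// Parses the remainder of an enhancement header: the enhancement name, type variable
// definitions, the ':' separator, and the enhanced type, verifying the target type is
// enhanceable and that it actually uses the enhancement's type variables.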
private String parseEnhancementHeaderSuffix( IGosuEnhancementInternal gsClass )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
Token t = new Token();
verify( getClassStatement(), match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF );
String strClassName = t._strValue;
strClassName = GosuStringUtil.isEmpty(getOwner().getNamespace())
? strClassName
: getOwner().getNamespace() + '.' + strClassName;
ClassDeclaration classDeclaration = new ClassDeclaration( strClassName );
pushExpression( classDeclaration );
setLocation( iOffset, iLineNum, iColumn );
popExpression();
getClassStatement().setClassDeclaration( classDeclaration );
if( gsClass.getEnclosingTypeReference() == null && !strClassName.equals( gsClass.getName() ) )
{
verify( getClassStatement(), false, Res.MSG_WRONG_CLASSNAME, strClassName, gsClass.getName() );
}
List<ITypeVariableDefinitionExpression> typeVarLiteralList = getOwner().parseTypeVariableDefs( getClassStatement(), false, getDeclTypeVars() );
gsClass.setGenericTypeVariables((List)typeVarLiteralList);
verify( getClassStatement(), match( null, ":", SourceCodeTokenizer.TT_OPERATOR ), Res.MSG_EXPECTING_COLON_ENHANCEMENT );
IType enhancedType = parseEnhancedOrImplementedType( gsClass, true, Collections.<IType>emptyList() );
if( !(enhancedType instanceof ErrorType ||
enhancedType instanceof IEnhanceableType) )
{
verify( getClassStatement(), false, Res.MSG_NOT_AN_ENHANCEABLE_TYPE, enhancedType.getName() );
}
gsClass.setEnhancedType( enhancedType );
ensureEnhancedTypeUsesTypeVarsOfEnhancement( typeVarLiteralList, enhancedType );
return strClassName;
}
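// Verifies that every type variable declared on the enhancement is referenced either by the
// enhanced type or by the bounding type of another of the enhancement's type variables.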
private void ensureEnhancedTypeUsesTypeVarsOfEnhancement( List<ITypeVariableDefinitionExpression> typeVarLiteralList, IType enhancedType )
{
if( typeVarLiteralList.isEmpty() )
{
return;
}
for( ITypeVariableDefinitionExpression expr: typeVarLiteralList )
{
boolean bReferencedByOtherTypeVar = false;
for( ITypeVariableDefinitionExpression expr2: typeVarLiteralList )
{
if( expr2 != expr )
{
if( hasTypeVar( expr2.getTypeVarDef().getBoundingType(), expr.getTypeVarDef().getType() ) )
{
bReferencedByOtherTypeVar = true;
break;
}
}
}
verify( getClassStatement(), bReferencedByOtherTypeVar || hasTypeVar( enhancedType, expr.getTypeVarDef().getType() ), Res.MSG_ENHANCED_TYPE_MUST_USE_ENHANCEMENT_TYPEVARS );
}
}
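// Returns true if the given type, or recursively any of its type parameters, is the given
// type variable. Array types are reduced to their core component type first.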
private boolean hasTypeVar( IType type, ITypeVariableType typeVar )
{
if( type == null )
{
return false;
}
if( type.isArray() )
{
type = TypeLord.getCoreType( type );
}
if( type == null || type.equals( typeVar ) )
{
return true;
}
if( type.isParameterizedType() )
{
for( IType typeParam: type.getTypeParameters() )
{
if( hasTypeVar( typeParam, typeVar ) )
{
return true;
}
}
}
return false;
}
void parseProgramExtendsStatement( IGosuClassInternal gsClass, boolean bResolveTypes )
{
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getLineOffset();
if( match( new Token(), Keyword.KW_extends ) )
{
IType superType = parseEnhancedOrImplementedType( gsClass, true, Collections.<IType>emptyList() );
SuperTypeClause stmt = new SuperTypeClause( superType );
if( superType instanceof IGosuClassInternal )
{
if( bResolveTypes )
{
((IGosuClassInternal)superType).compileDeclarationsIfNeeded();
}
}
pushExpression( stmt );
try
{
setLocation( iOffset, iLineNum, iColumn );
}
finally
{
popExpression();
}
ITypeInfo typeInfo = superType.getTypeInfo();
if( !(superType instanceof IErrorType) && typeInfo instanceof IRelativeTypeInfo )
{
IConstructorInfo noArgCtor = ((IRelativeTypeInfo)typeInfo).getConstructor( gsClass, null );
if( verify( stmt, noArgCtor != null, Res.MSG_NO_DEFAULT_CTOR_IN, superType.getName() ) )
{
gsClass.setSuperType( superType );
}
}
}
}
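// Parses a single supertype reference used in extends/implements clauses and enhancement
// targets, enforcing the relevant rules (interfaces may only extend interfaces, classes may not
// extend interfaces, final, primitive, or array types, no cyclic inheritance, no raw generic
// supertypes, etc.) and returning the resolved type, or an error type when verification fails.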
private IType parseEnhancedOrImplementedType( IGosuClassInternal gsClass, boolean bExtended, List<IType> interfaces )
{
IType extendedType = null;
TypeLiteral extendedTypeExpr = null;
if( gsClass instanceof IGosuProgram && !bExtended )
{
extendedType = gsClass.getSupertype() != null ? gsClass.getSupertype() : JavaTypes.OBJECT();
}
else if( !gsClass.isEnum() || !bExtended )
{
getOwner().parseTypeLiteral( !(gsClass instanceof IGosuEnhancementInternal) && (gsClass.isInterface() || !bExtended) );
extendedTypeExpr = (TypeLiteral)popExpression();
extendedType = extendedTypeExpr.getType().getType();
if( !verify( extendedTypeExpr, !extendedType.isCompoundType(), Res.MSG_COMPOUND_TYPE_NOT_ALLOWED_HERE ) )
{
extendedType = ErrorType.getInstance();
}
if( !(extendedType instanceof ErrorType) )
{
if( !(gsClass instanceof IGosuEnhancementInternal) )
{
if( gsClass.isInterface() )
{
verify( extendedTypeExpr, extendedType.isInterface(), Res.MSG_INTERFACE_CANNOT_EXTEND_CLASS );
}
else if( bExtended )
{
if( verify( extendedTypeExpr, !extendedType.isInterface(), Res.MSG_CLASS_CANNOT_EXTEND_INTERFACE ) )
{
verify( extendedTypeExpr, !gsClass.isEnum(), Res.MSG_ENUM_CANNOT_EXTEND_CLASS );
verify( extendedTypeExpr, extendedType != JavaTypes.OBJECT(), Res.MSG_SUBCLASS_OBJECT, gsClass.getRelativeName() );
verify( extendedTypeExpr, !extendedType.isArray(), Res.MSG_CANNOT_EXTEND_ARRAY, extendedType.getRelativeName() );
}
}
else
{
verify( extendedTypeExpr, extendedType.isInterface(), Res.MSG_CLASS_CANNOT_IMPLEMENT_CLASS );
}
verify( extendedTypeExpr, !extendedType.isPrimitive(), Res.MSG_CANNOT_EXTEND_PRIMITIVE_TYPE );
verify( extendedTypeExpr, !extendedType.isFinal(), Res.MSG_CANNOT_EXTEND_FINAL_TYPE, extendedType.getName() );
if( verify( extendedTypeExpr, !interfaces.contains( extendedType ), Res.MSG_DUPLICATE_CLASS_FOUND, extendedType.getRelativeName() ) )
{
IType[] conflict = inheritsWithDifferentTypeParams( gsClass.getSupertype(), interfaces, extendedType );
if( conflict != null )
{
extendedTypeExpr.addParseException( Res.MSG_INHEREITED_WITH_DIFF_ARG_TYPES, TypeLord.getPureGenericType( conflict[0] ).getName(), Arrays.toString( conflict[0].getTypeParameters() ) + " , " + Arrays.toString( conflict[1].getTypeParameters() ) );
}
}
if( isCyclicInheritance( extendedType, gsClass ) )
{
extendedType = ErrorType.getInstance( extendedType.getName() );
verify( extendedTypeExpr, false, Res.MSG_CYCLIC_INHERITANCE, extendedType.getName() );
}
}
else
{
if( extendedType instanceof IGosuEnhancementInternal )
{
verify( extendedTypeExpr, false, Res.MSG_ENHANCEMENTS_CANNOT_ENHANCE_OTHER_ENHANCEMENTS, extendedType.getName() );
}
}
}
}
else if( gsClass.isEnum() )
{
extendedType = JavaTypes.ENUM();
extendedType = extendedType.getParameterizedType( gsClass );
}
makeProxy( gsClass, extendedType );
extendedType = TypeLord.makeDefaultParameterizedType( extendedType );
if( !verify( extendedTypeExpr == null ? getClassStatement() : extendedTypeExpr,
(!extendedType.isGenericType() || extendedType instanceof IGosuClass && !((IGosuClass) extendedType).isHeaderCompiled()) ||
extendedType.isParameterizedType() || gsClass instanceof IGosuEnhancementInternal,
Res.MSG_CANNOT_EXTEND_RAW_GENERIC_TYPE, extendedType.getName() ) )
{
// If we are unable to resolve a parameterized type, extend the error type
extendedType = ErrorType.getInstance();
}
if( bExtended && !(gsClass instanceof IGosuEnhancementInternal) )
{
verify( extendedTypeExpr == null ? getClassStatement() : extendedTypeExpr,
Modifier.isStatic( extendedType.getModifiers() ) || extendedType.getEnclosingType() == null ||
TypeLord.enclosingTypeInstanceInScope( extendedType.getEnclosingType(), getGosuClass() ),
Res.MSG_NO_ENCLOSING_INSTANCE_IN_SCOPE, extendedType.getEnclosingType() );
}
if( !(extendedType instanceof ErrorType) )
{
if( gsClass.isDeclarationsCompiled() )
{
verifySuperTypeVarVariance( getClassStatement(), extendedType );
}
}
return extendedType;
}
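// Searches the supertype and interface hierarchy for an interface whose raw generic type
// matches the given interface but whose type parameters differ; returns the conflicting pair,
// or null if there is no conflict.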
private IType[] inheritsWithDifferentTypeParams( IType superType, List<IType> interfaces, IType iface )
{
if( superType != null )
{
IType[] conflict = inheritsWithDifferentTypeParams( null, Arrays.asList( superType.getInterfaces() ), iface );
if( conflict != null )
{
return conflict;
}
}
IType rawIface = TypeLord.getPureGenericType( iface );
for( IType csr: interfaces )
{
if( TypeLord.getPureGenericType( csr ) == rawIface && csr != iface )
{
return new IType[] {csr, iface};
}
IType[] conflict = inheritsWithDifferentTypeParams( null, Arrays.asList( csr.getInterfaces() ), iface );
if( conflict != null )
{
return conflict;
}
conflict = inheritsWithDifferentTypeParams( null, Arrays.asList( iface.getInterfaces() ), csr );
if( conflict != null )
{
return conflict;
}
}
return null;
}
private void makeProxy( IGosuClassInternal gsClass, IType extendedType )
{
if( !(gsClass instanceof IGosuEnhancementInternal) && extendedType instanceof IJavaType )
{
// Create a gosu class proxy for the java one.
// It is attached to the JavaType as its adapterClass.
GosuClassProxyFactory.instance().create( extendedType );
}
}
private Object parseFunctionOrConstructorOrFieldDeclaration( IGosuClassInternal gsClass )
{
int[] location = new int[3];
Object rtn = _parseFunctionOrConstructorOrFieldDeclaration( gsClass, location );
if( rtn != null )
{
setLocation( location[0], location[1], location[2] );
}
return rtn;
}
//------------------------------------------------------------------------------
//
// class-member-declaration
// <constructor-declaration>
// <function-declaration>
// <field-declaration>
//
// constructor-declaration
// [modifiers] function <class-name> ( [ <argument-declaration-list> ] )
//
// function-declaration
// [modifiers] function <identifier> ( [ <argument-declaration-list> ] ) [: <type-literal>]
//
// field-declaration
// [modifiers] var <identifier> [ : <type-literal> ] = <expression>
// [modifiers] var <identifier> : <type-literal> [ = <expression> ]
//
private Object _parseFunctionOrConstructorOrFieldDeclaration( IGosuClassInternal gsClass, int[] location )
{
String[] T = new String[1];
ModifierInfo modifiers;
boolean bInterface = gsClass.isInterface();
while( true )
{
modifiers = parseUntilMemberKeyword( T, false, -1, location );
if( modifiers.getModifiers() == -1 )
{
return null;
}
if( Keyword.KW_class.equals( T[0] ) ||
Keyword.KW_interface.equals( T[0] ) ||
Keyword.KW_annotation.equals( T[0] ) ||
Keyword.KW_structure.equals( T[0] ) ||
Keyword.KW_enum.equals( T[0] ) )
{
if( bInterface && Keyword.KW_enum.equals( T[0] ))
{
verify( getClassStatement(), !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_enum );
}
parseInnerClassDeclaration();
}
else
{
break;
}
}
if( bInterface )
{
modifiers.addModifiers( Modifier.PUBLIC );
}
if( T[0] != null &&
(Keyword.KW_function.equals( T[0] ) ||
Keyword.KW_construct.equals( T[0] )) )
{
String ctorNameToken = null;
boolean bConstructKeyword = false;
if( Keyword.KW_construct.equals( T[0] ) )
{
T[0] = gsClass.getRelativeName();
ctorNameToken = T[0];
bConstructKeyword = true;
}
else
{
int mark = getTokenizer().mark();
if( match( null, null, SourceCodeTokenizer.TT_WORD, true ) )
{
T[0] = getTokenizer().getTokenAt( mark ).getStringValue();
}
}
FunctionStatement fs = makeFunctionOrConstructorStatement( gsClass, T[0], bConstructKeyword );
IParserState constructOrFunctionState = makeLazyLightweightParserState();
verify( fs, !(gsClass instanceof IGosuProgramInternal) || !((IGosuProgramInternal)gsClass).isStatementsOnly(),
Res.MSG_FUNCTIONS_NOT_ALLOWED_IN_THIS_CONTEXT );
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, ctorNameToken, false, false, modifiers );
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
if( bInterface && !match( null, null, '{', true ) ) {
modifiers.addModifiers( Modifier.ABSTRACT );
dfs.setAbstract( true );
}
if( dfs != null )
{
dfs.setClassMember( true );
boolean bConstructor = dfs.getDisplayName().equals( gsClass.getRelativeName() );
if( bConstructor )
{
verify( fs, !Modifier.isAbstract(modifiers.getModifiers()), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE );
verify( fs, !gsClass.isInterface(), Res.MSG_NOT_ALLOWED_IN_INTERFACE );
verify( fs, !(gsClass instanceof IGosuProgramInternal), Res.MSG_CONSTRUCTORS_NOT_ALLOWD_IN_THIS_CONTEXT );
verify( fs, !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, Keyword.KW_construct );
verify( fs, !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_construct );
verify( fs, !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_NO_STATIC_CONSTRUCTOR );
verify( fs, !Modifier.isTransient( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, Keyword.KW_construct );
if( !bConstructKeyword )
{
fs.addParseWarning( new ObsoleteConstructorWarning( constructOrFunctionState, Res.MSG_OBSOLETE_CTOR_SYNTAX ) );
}
}
else
{
verifyNoCombinedFinalStaticModifierDefined( fs, false, modifiers.getModifiers() );
verify(fs, !Modifier.isAbstract(modifiers.getModifiers()) || !Modifier.isStatic(modifiers.getModifiers()), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_static, Keyword.KW_abstract);
verify(fs, !Modifier.isAbstract(modifiers.getModifiers()) || !Modifier.isFinal(modifiers.getModifiers()), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_abstract);
verify( fs, !Modifier.isTransient(modifiers.getModifiers()), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_transient, Keyword.KW_function );
}
}
eatOptionalSemiColon( bInterface );
if( !Modifier.isNative( modifiers.getModifiers() ) && !Modifier.isAbstract( modifiers.getModifiers() ) )
{
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
}
return dfs;
}
else if( T[0] != null && T[0].equals( Keyword.KW_property.toString() ) )
{
boolean bGetter = match( null, Keyword.KW_get );
verify( getClassStatement(), bGetter || match( null, Keyword.KW_set ), Res.MSG_EXPECTING_PROPERTY_GET_OR_SET_MODIFIER );
FunctionStatement fs = new FunctionStatement();
verifyNoCombinedFinalStaticModifierDefined( fs, false, modifiers.getModifiers() );
verify( fs, !(gsClass instanceof IGosuProgramInternal) || !((IGosuProgramInternal)gsClass).isStatementsOnly(),
Res.MSG_FUNCTIONS_NOT_ALLOWED_IN_THIS_CONTEXT );
DynamicFunctionSymbol dfs = getOwner().parseFunctionDecl( fs, true, bGetter, modifiers );
if( dfs == null )
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_EXPECTING_DECL ) );
return null;
}
if( dfs.getDisplayName().length() > 0 &&
dfs.getDisplayName().charAt(0) == '@' )
{
String name = dfs.getDisplayName().substring(1);
boolean bOuterLocalDefined = findLocalInOuters( name ) instanceof CapturedSymbol;
verifyOrWarn( fs, !bOuterLocalDefined, false, Res.MSG_VARIABLE_ALREADY_DEFINED, name );
}
if( bInterface && !match( null, null, '{', true ) )
{
verify( fs, !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_property );
modifiers.setModifiers( Modifier.setAbstract( modifiers.getModifiers(), true ) );
dfs.setAbstract( true );
}
verify( fs, !Modifier.isAbstract( modifiers.getModifiers() ) || !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_static, Keyword.KW_abstract );
verify( fs, !Modifier.isAbstract( modifiers.getModifiers() ) || !Modifier.isFinal( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_final, Keyword.KW_abstract );
verify( fs, !Modifier.isAbstract( modifiers.getModifiers() ) || gsClass.isAbstract(), Res.MSG_ABSTRACT_MEMBER_NOT_IN_ABSTRACT_CLASS );
fs.setDynamicFunctionSymbol( dfs );
pushStatement( fs );
setLocation( location[0], location[1], location[2] );
popStatement();
dfs.setClassMember( true );
eatOptionalSemiColon( bInterface );
if( !bInterface &&
!Modifier.isNative( modifiers.getModifiers() ) && !Modifier.isAbstract( modifiers.getModifiers() ) )
{
eatStatementBlock( fs, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
}
DynamicPropertySymbol dps = getOrCreateDynamicPropertySymbol( getClassStatement(), gsClass, dfs, bGetter );
PropertyStatement statement = new PropertyStatement( fs, dps );
verifyPropertiesAreSymmetric( bGetter, dfs, dps, statement );
pushStatement( statement );
return dps;
}
else if( T[0] != null && T[0].equals( Keyword.KW_var.toString() ) )
{
if( bInterface )
{
modifiers.setModifiers( Modifier.setStatic( modifiers.getModifiers(), true ) );
modifiers.setModifiers( Modifier.setFinal( modifiers.getModifiers(), true ) );
}
return parseFieldDecl( modifiers );
}
else if( T[0] != null && T[0].equals( Keyword.KW_delegate.toString() ) )
{
return parseDelegateDecl( modifiers, gsClass );
}
else
{
getClassStatement().addParseException( new ParseException( makeFullParserState(), Res.MSG_EXPECTING_DECL ) );
return null;
}
}
private void verifySuperTypeVarVariance( ClassStatement classStatement, IType type )
{
if( !type.isParameterizedType() || !getGosuClass().isGenericType() )
{
return;
}
IGenericTypeVariable[] gtvs = type.getGenericType().getGenericTypeVariables();
IType[] typeParameters = type.getTypeParameters();
for( int i = 0; i < typeParameters.length; i++ )
{
if( gtvs[i] != null && gtvs[i].getTypeVariableDefinition() != null )
{
Variance variance = Variance.maybeInferVariance( type, gtvs[i] );
verifyTypeVarVariance( variance, classStatement, typeParameters[i] );
}
}
}
private void verifyTypeVarVariance( Variance ctxVariance, ParsedElement elem, IType type )
{
if( !getGosuClass().isGenericType() )
{
return;
}
Variance.verifyTypeVarVariance( ctxVariance,
getGosuClass(),
( Variance ctxV, Variance typeVarV ) -> {
verify( elem, typeVarV == ctxV || typeVarV == Variance.DEFAULT || typeVarV == Variance.INVARIANT || ctxV == Variance.PENDING || typeVarV == Variance.PENDING,
Res.MSG_TYPE_VAR_VARIANCE_ERROR, type.getRelativeName(), typeVarV == null ? "null" : typeVarV.getDesc(), ctxV.getDesc(), type.getRelativeName() );
},
type );
}
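// Verifies that a property's getter and setter agree on the static modifier and on the
// property type: the setter's parameter type must accept both the getter's return type and
// the property symbol's type.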
private void verifyPropertiesAreSymmetric( boolean bGetter,
DynamicFunctionSymbol newFunction,
DynamicPropertySymbol propertySymbol,
Statement stmt )
{
DynamicFunctionSymbol getter;
DynamicFunctionSymbol setter;
if( bGetter )
{
getter = newFunction;
setter = propertySymbol == null ? null : propertySymbol.getSetterDfs();
}
else
{
getter = propertySymbol == null ? null : propertySymbol.getGetterDfs();
setter = newFunction;
}
if( getter != null && setter != null )
{
if( getter.isStatic() != setter.isStatic() )
{
verify( stmt, false, Res.MSG_PROPERTIES_MUST_AGREE_ON_STATIC_MODIFIERS );
}
if( setter.getArgs().size() == 1 )
{
IType setterType = setter.getArgTypes()[0];
IType returnType = getter.getReturnType();
if( !setterType.isAssignableFrom( returnType ) ||
!setterType.isAssignableFrom( propertySymbol.getType() ) )
{
verify( stmt, false, Res.MSG_PROPERTIES_MUST_AGREE_ON_TYPE );
}
}
}
else if( getter != null && propertySymbol != null && newFunction != null &&
getGosuClass() == propertySymbol.getScriptPart().getContainingType() &&
getter.getSuperDfs() == null )
{
verify( stmt, propertySymbol.getType().equals( newFunction.getReturnType() ), Res.MSG_PROPERTIES_MUST_AGREE_ON_TYPE );
}
}
private FunctionStatement makeFunctionOrConstructorStatement( IGosuClassInternal gsClass, String strMemberKeyword, boolean bConstructKeyword )
{
FunctionStatement fs;
if( gsClass != null &&
(bConstructKeyword || gsClass.getRelativeName().equals( strMemberKeyword )) )
{
fs = new ConstructorStatement( bConstructKeyword );
}
else
{
fs = new FunctionStatement();
}
return fs;
}
private ModifierInfo parseUntilMemberKeyword( String[] T, boolean bIgnoreErrors, int[] location )
{
return parseUntilMemberKeyword( T, bIgnoreErrors, -1, location );
}
private ModifierInfo parseUntilMemberKeyword( String[] T, boolean bIgnoreErrors, int iEnd, int[] location )
{
return parseUntilMemberKeyword( T, bIgnoreErrors, iEnd, location, null );
}
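// Skips forward through the token stream, parsing modifier lists, until a member declaration
// keyword (function, construct, property, var, delegate, class, etc.) is found. Records the
// member's start location (and optionally a tokenizer mark) and returns its ModifierInfo, or a
// ModifierInfo with modifiers == -1 at EOF, at a closing brace, or past the given end offset.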
private ModifierInfo parseUntilMemberKeyword( String[] T, boolean bIgnoreErrors, int iEnd, int[] location, int[] mark )
{
boolean bPeek = T == null;
while( true )
{
if( location != null )
{
location[0] = getTokenizer().getTokenStart();
location[1] = getTokenizer().getLineNumber();
location[2] = getTokenizer().getTokenColumn();
}
if( mark != null )
{
mark[0] = getTokenizer().mark();
}
ModifierInfo modifiers = parseModifiers( bIgnoreErrors );
if( matchDeclarationKeyword( T, bPeek, getTokenizer() ) )
{
return modifiers;
}
popModifierList();
boolean bAte = false;
if( getGosuClass() instanceof IGosuProgram )
{
bAte = eatPossibleEnclosedVarInStmt(); // e.g., for( var foo in foos ) {...} we don't want the var foo to be consumed as a field (applies to GosuPrograms).
}
bAte = eatPossibleStatementBlock() || bAte;
if( location != null )
{
// Mark possible end location of member definition
location[0] = getTokenizer().getTokenStart();
}
if( match( null, SourceCodeTokenizer.TT_EOF ) ||
((!(getGosuClass() instanceof IGosuProgram) || !getGosuClass().isHeaderCompiled()) && match( null, '}' )) ||
(iEnd >= 0 && getTokenizer().getTokenStart() >= iEnd) )
{
modifiers.setModifiers( -1 );
return modifiers;
}
if( !bAte )
{
getTokenizer().nextToken();
if( getTokenizer().isEOF() )
{
modifiers.setModifiers( -1 );
return modifiers;
}
}
}
}
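// Removes the most recently pushed parse-tree location if it is a modifier list; used to discard
// modifiers that were parsed speculatively but did not precede a member declaration.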
private void popModifierList()
{
ParseTree parseTree = getOwner().peekLocation();
if( parseTree == null )
{
return;
}
ParsedElement pe = parseTree.getParsedElement();
if( pe instanceof IModifierListClause )
{
List<ParseTree> locationsList = getLocationsList();
locationsList.remove( locationsList.size()-1 );
}
}
private void parseInnerClassDeclaration()
{
IGosuClassInternal enclosingGsClass = getClassStatement().getGosuClass();
int mark = getTokenizer().mark();
String strInnerClass = null;
if( verify( getClassStatement(), match( null, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF ) )
{
strInnerClass = getTokenizer().getTokenAt( mark ).getStringValue();
}
if( strInnerClass != null )
{
String name = enclosingGsClass.getName();
String dotInner = "." + strInnerClass;
verify( getClassStatement(), !name.equals(strInnerClass) &&
!name.contains(dotInner + ".") &&
!name.endsWith(dotInner) &&
!name.startsWith(strInnerClass + "."), Res.MSG_DUPLICATE_CLASS_FOUND, name + dotInner );
for( IGosuClass c : enclosingGsClass.getKnownInnerClassesWithoutCompiling().values() )
{
IGosuClassInternal innerClass = (IGosuClassInternal)c;
if( innerClass.getRelativeName().equals( strInnerClass ) )
{
int i = 0;
String relativeName = innerClass.getName();
while( innerClass.isDeclarationsCompiled() || innerClass.isDeclarationsBypassed() )
{
// The inner class is already declaration-compiled, maybe this is a duplicate inner class...
String duplicate = relativeName + "_duplicate_" + i++;
innerClass = (IGosuClassInternal)TypeSystem.getByFullNameIfValid( duplicate );
if( innerClass == null )
{
return;
}
}
parseInnerClassDeclaration( innerClass );
break;
}
}
}
}
private void parseInnerClassDeclaration( IGosuClassInternal innerClass ) {
// Preserve dfs decls map of outer class
Map<String, List<IFunctionSymbol>> restoreDfsDecls = copyDFSDecls( getOwner() );
try {
new GosuClassParser( getOwner(), innerClass ).parseDeclarations( innerClass );
if( innerClass.isInterface() )
{
ModifierInfo mi = (ModifierInfo)innerClass.getModifierInfo();
mi.setModifiers( Modifier.setStatic( mi.getModifiers(), true ));
}
}
finally {
getOwner().setDfsDeclInSetByName( restoreDfsDecls );
}
}
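// Copies the owner's dfs-decl map, including a copy of each symbol list, so the outer class's
// declarations can be restored after an inner class is parsed.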
private static Map<String, List<IFunctionSymbol>> copyDFSDecls( GosuParser owner )
{
Map<String, List<IFunctionSymbol>> hashMap = new HashMap<>( owner.getDfsDecls() );
for( String name : hashMap.keySet() )
{
hashMap.put( name, new ArrayList<>( hashMap.get( name ) ) );
}
return hashMap;
}
private VarStatement parseFieldDecl( ModifierInfo modifiers )
{
Token T = new Token();
VarStatement varStmt = new VarStatement();
verify( varStmt, !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE );
verify( varStmt, !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, Keyword.KW_var );
final int iNameStart = getTokenizer().getTokenStart();
if( !verify( varStmt, match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_VAR ) )
{
T._strValue = null;
}
String strIdentifier = T._strValue == null ? "" : T._strValue;
boolean bAlreadyDefined = getSymbolTable().getSymbol( strIdentifier ) != null;
verify( varStmt, !bAlreadyDefined, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
checkForEnumConflict( varStmt, strIdentifier );
boolean bStatic = Modifier.isStatic( modifiers.getModifiers() );
GlobalScope scope;
if( match( null, Keyword.KW_application ) )
{
// NOTE: For class parsing APPLICATION == static
bStatic = true;
scope = GlobalScope.EXECUTION;
verifyOrWarn( varStmt, false, true, Res.MSG_APPLICATION_MODIFIER_HAS_BEEN_DEPRECATED );
}
else if( match( null, Keyword.KW_session ) )
{
bStatic = true;
scope = GlobalScope.SESSION;
verifyOrWarn( varStmt, false, true, Res.MSG_SESSION_MODIFIER_HAS_BEEN_DEPRECATED );
}
else if( match( null, Keyword.KW_request ) )
{
bStatic = true;
scope = GlobalScope.REQUEST;
verifyOrWarn( varStmt, false, true, Res.MSG_REQUEST_MODIFIER_HAS_BEEN_DEPRECATED );
}
else
{
// execution keyword may be there
boolean hasExecutionKeyword = match( null, Keyword.KW_execution );
scope = GlobalScope.EXECUTION;
verifyOrWarn( varStmt, !hasExecutionKeyword, true, Res.MSG_EXECUTION_MODIFIER_HAS_BEEN_DEPRECATED );
}
TypeLiteral typeLiteral = null;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
getOwner().parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
}
else if( !match( null, "=", SourceCodeTokenizer.TT_OPERATOR, true ) )
{
if( match( null, null, '(', true ) )
{
getOwner().parseBlockLiteral();
typeLiteral = (TypeLiteral)popExpression();
}
}
IType type;
if( typeLiteral != null )
{
type = typeLiteral.getType().getType();
varStmt.setTypeLiteral( typeLiteral );
}
else
{
type = GosuParserTypes.NULL_TYPE();
}
varStmt.setScope( scope );
if( bStatic )
{
modifiers.setModifiers( Modifier.setStatic( modifiers.getModifiers(), true ) );
}
varStmt.setModifierInfo( modifiers );
if( !verify( varStmt, varStmt.isPrivate() || type != GosuParserTypes.NULL_TYPE(), Res.MSG_NON_PRIVATE_MEMBERS_MUST_DECLARE_TYPE ) )
{
type = ErrorType.getInstance();
}
DynamicPropertySymbol dpsVarProperty = getOwner().parseVarPropertyClause( varStmt, strIdentifier, type, false );
if( dpsVarProperty != null )
{
String propertyName = dpsVarProperty.getName();
ISymbol existingSym = getSymbolTable().getSymbol(propertyName);
boolean bOuterLocalDefined = findLocalInOuters( propertyName ) instanceof CapturedSymbol;
bAlreadyDefined = existingSym != null || bOuterLocalDefined || propertyName.equals( strIdentifier );
verify( varStmt, !bAlreadyDefined || existingSym instanceof DynamicPropertySymbol, Res.MSG_VARIABLE_ALREADY_DEFINED, propertyName );
getSymbolTable().putSymbol( dpsVarProperty );
verifyPropertiesAreSymmetric( true, dpsVarProperty.getGetterDfs(), dpsVarProperty, varStmt );
setStatic( bStatic, dpsVarProperty );
dpsVarProperty.addMemberSymbols( getGosuClass() );
}
AbstractDynamicSymbol symbol;
if( varStmt.getScope() == GlobalScope.EXECUTION )
{
symbol = new DynamicSymbol( getGosuClass(), getSymbolTable(), strIdentifier, type, null );
}
else
{
symbol = new ScopedDynamicSymbol( getSymbolTable(), strIdentifier, getGosuClass().getName(), type, varStmt.getScope() );
}
modifiers.addAll( symbol.getModifierInfo() );
if( varStmt.isPrivate() )
{
// Ensure private bit is explicit
modifiers.setModifiers( Modifier.setPrivate( modifiers.getModifiers(), true ) );
}
symbol.setModifierInfo( modifiers );
varStmt.setSymbol( symbol );
varStmt.setNameOffset( iNameStart, T._strValue );
if( bAlreadyDefined )
{
int iDupIndex = getOwner().nextIndexOfErrantDuplicateDynamicSymbol( symbol, getSymbolTable().getSymbols().values(), false );
if( iDupIndex >= 0 )
{
symbol.renameAsErrantDuplicate( iDupIndex );
}
}
getSymbolTable().putSymbol( symbol );
pushStatement( varStmt );
return varStmt;
}
private void checkForEnumConflict( VarStatement varStmt, String identifier )
{
if( getGosuClass().isEnum() )
{
ISymbol existingProp = getGosuClass().getMemberProperty( identifier );
verify( varStmt, !(existingProp instanceof DynamicPropertySymbol), Res.MSG_VARIABLE_ALREADY_DEFINED, identifier );
}
}
private VarStatement parseDelegateDecl( ModifierInfo modifiers, IGosuClassInternal gsClass )
{
Token T = new Token();
DelegateStatement delegateStmt = new DelegateStatement();
verify( delegateStmt, gsClass == null || (!gsClass.isInterface() && !gsClass.isEnum()), Res.MSG_DELEGATION_NOT_ALLOWED_HERE );
verify( delegateStmt, !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_DELEGATES_CANNOT_BE_STATIC );
verify( delegateStmt, !Modifier.isAbstract( modifiers.getModifiers() ), Res.MSG_MODIFIER_ABSTRACT_NOT_ALLOWED_HERE );
verify( delegateStmt, !Modifier.isOverride( modifiers.getModifiers() ), Res.MSG_ILLEGAL_USE_OF_MODIFIER, Keyword.KW_override, Keyword.KW_var );
int iNameOffset = getTokenizer().getTokenStart();
if( verify( delegateStmt, match( T, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_IDENTIFIER_VAR ) )
{
delegateStmt.setNameOffset( iNameOffset, null );
}
String strIdentifier = T._strValue == null ? "" : T._strValue;
verify( delegateStmt, getSymbolTable().getSymbol( strIdentifier ) == null, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
GlobalScope scope = GlobalScope.EXECUTION;
TypeLiteral typeLiteral = null;
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
getOwner().parseTypeLiteral();
typeLiteral = (TypeLiteral)popExpression();
}
List<IType> constituents = new ArrayList<IType>();
if( verify( delegateStmt, match( null, Keyword.KW_represents ), Res.MSG_EXPECTING_REPRESENTS ) )
{
do
{
getOwner().parseTypeLiteral();
TypeLiteral ifaceLiteral = (TypeLiteral)popExpression();
IType iface = ifaceLiteral.getType().getType();
if( !(iface instanceof ErrorType) )
{
verify( ifaceLiteral, iface.isInterface() && !iface.isCompoundType(), Res.MSG_DELEGATES_REPRESENT_INTERFACES_ONLY );
verify( ifaceLiteral, TypeLord.isDelegatableInterface( gsClass, iface ), Res.MSG_CLASS_DOES_NOT_IMPL, iface );
verify( typeLiteral, typeLiteral == null || TypeLord.isDelegatableInterface( typeLiteral.getType().getType(), iface ), Res.MSG_CLASS_DOES_NOT_IMPL, iface );
}
constituents.add( iface );
} while( match( null, ',' ) );
}
delegateStmt.setConstituents( constituents );
IType type;
if( typeLiteral != null )
{
type = typeLiteral.getType().getType();
delegateStmt.setTypeLiteral( typeLiteral );
}
else
{
type = GosuParserTypes.NULL_TYPE();
}
delegateStmt.setScope( scope );
delegateStmt.setModifierInfo( modifiers );
verify( delegateStmt, delegateStmt.isPrivate() || type != GosuParserTypes.NULL_TYPE(), Res.MSG_NON_PRIVATE_MEMBERS_MUST_DECLARE_TYPE );
AbstractDynamicSymbol symbol = new DynamicSymbol( getGosuClass(), getSymbolTable(), strIdentifier, type, null );
modifiers.addAll( symbol.getModifierInfo() );
symbol.setModifierInfo( modifiers );
delegateStmt.setSymbol( symbol );
getSymbolTable().putSymbol( symbol );
pushStatement( delegateStmt );
return delegateStmt;
}
private void setStatic( boolean bStatic, DynamicPropertySymbol dpsVarProperty )
{
dpsVarProperty.setStatic( bStatic );
if( dpsVarProperty.getSetterDfs() != null )
{
dpsVarProperty.getSetterDfs().setStatic( bStatic );
}
if( dpsVarProperty.getGetterDfs() != null )
{
dpsVarProperty.getGetterDfs().setStatic( bStatic );
}
}
//------------------------------------------------------------------------------
//
// class-statement
// [modifiers] class <identifier> [extends <base-class>] [implements <interfaces-list>] { <class-members> }
//
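// For example (illustrative Gosu source only; the names are made up), a class statement of the form
// parsed here looks like:
//
//   public class Person extends Base implements IEmailable {
//     var _name : String
//     property get Name() : String { return _name }
//     function greet() : String { return "Hi, " + _name }
//   }
//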
boolean parseClassStatement()
{
IGosuClassInternal gsClass = getGosuClass();
ensureAbstractMethodsImpledAndNoDiamonds( gsClass );
ensureInheritedMethodsDoNotClash( gsClass );
//## todo: remove this scope?
getSymbolTable().pushScope();
try
{
verify( getClassStatement(), gsClass instanceof IGosuProgram || match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CLASS_DEF );
Statement currentStmt = (isTopLevelClass( gsClass ) || TypeLord.isEvalProgram( gsClass )) ? null : peekStatement();
parseClassMembers( gsClass );
for( Statement stmt = peekStatement(); stmt != currentStmt; stmt = peekStatement() )
{
stmt = popStatement();
if( stmt instanceof VarStatement ||
stmt instanceof FunctionStatement ||
stmt instanceof PropertyStatement ||
stmt instanceof NoOpStatement ||
stmt instanceof NamespaceStatement ||
stmt instanceof UsesStatement ||
stmt instanceof ClassStatement )
{
// ignore
}
else
{
throw new IllegalStateException( "Expecting only statements for: package, uses, var, function, or property." );
}
}
verify( getClassStatement(), match( null, '}' ) || gsClass instanceof IGosuProgram, Res.MSG_EXPECTING_CLOSE_BRACE_FOR_CLASS_DEF );
}
finally
{
getSymbolTable().popScope();
}
return true;
}
private void ensureInheritedMethodsDoNotClash( IGosuClassInternal gsClass )
{
if( !inheritsFromTwoOrMoreTypes( gsClass ) )
{
return;
}
MethodList methods = gsClass.getTypeInfo().getMethods( gsClass );
for( DynamicArray<IMethodInfo> bucket: methods.getMethodBuckets() )
{
if( bucket.size() > 1 )
{
Map<String, IReducedDynamicFunctionSymbol> functionTypes = new HashMap<>();
for( IMethodInfo mi : bucket )
{
if( mi instanceof IGosuMethodInfo )
{
IReducedDynamicFunctionSymbol dfs = ((IGosuMethodInfo)mi).getDfs();
IReducedDynamicFunctionSymbol originalDfs = dfs;
while( true )
{
IReducedDynamicFunctionSymbol superDfs = dfs.getSuperDfs();
if( superDfs != null && superDfs != dfs )
{
dfs = superDfs;
}
else
{
IReducedDynamicFunctionSymbol backingDfs = dfs.getBackingDfs();
if( backingDfs != null && backingDfs != dfs )
{
dfs = backingDfs;
}
else
{
break;
}
}
}
if( dfs != originalDfs && !(dfs.getGosuClass() instanceof IGosuEnhancement) )
{
FunctionType ft = ((FunctionType)dfs.getType()).getRuntimeType();
String paramSignature = ft.getParamSignature();
IReducedDynamicFunctionSymbol existingDfs = functionTypes.get( paramSignature );
if( existingDfs != null && existingDfs.getGosuClass() != dfs.getGosuClass() )
{
addError( getClassStatement(), Res.MSG_FUNCTION_CLASH_PARAMS, dfs.getName(), dfs.getGosuClass().getName(), existingDfs.getName(), existingDfs.getGosuClass().getName() );
}
functionTypes.put( paramSignature, dfs );
}
}
}
}
}
}
private boolean inheritsFromTwoOrMoreTypes( IGosuClassInternal gsClass )
{
int iCount = gsClass.getSuperClass() == null ? 0 : 1;
iCount += (gsClass.getInterfaces().length - 1); // subtract IGosuObject proxy
return iCount > 1;
}
private void ensureAbstractMethodsImpledAndNoDiamonds( IGosuClassInternal gsClass )
{
List<IFunctionType> unimpled = gsClass.getUnimplementedMethods();
for( Iterator<IFunctionType> iter = unimpled.iterator(); iter.hasNext(); )
{
IFunctionType funcType = iter.next();
final IMethodInfo mi = funcType.getMethodInfo();
if( mi.isDefaultImpl() )
{
// mi is a default interface method the class (or interface) does not override,
// check for a duplicate, not-overridden method that comes from an interface that
// is unrelated to mi's declaring interface
// i.e., prohibit "diamond" patterns directly interface-inherited from the class (or interface).
if( conflictsWithUnrelatedIfaceMethod( gsClass, funcType, unimpled ) )
{
iter.remove();
}
}
else if( !gsClass.isInterface() && !gsClass.isAbstract() )
{
// mi is abstract, the non-abstract class failed to implement it...
String strClass = funcType.getEnclosingType().getName();
strClass = IGosuClass.ProxyUtil.getNameSansProxy( strClass );
getClassStatement().addParseException( new NotImplementedParseException( makeFullParserState(), gsClass, strClass, funcType ) );
}
}
}
private boolean conflictsWithUnrelatedIfaceMethod( IGosuClassInternal gsClass, IFunctionType ft, List<IFunctionType> unimpled )
{
IMethodInfo mi = ft.getMethodInfo();
outer:
for( IFunctionType funcType: unimpled )
{
if( ft == funcType )
{
continue;
}
final IMethodInfo csrMi = funcType.getMethodInfo();
if( csrMi.getDisplayName().equals( mi.getDisplayName() ) &&
csrMi.getParameters().length == mi.getParameters().length &&
!csrMi.getOwnersType().isAssignableFrom( mi.getOwnersType() ) &&
!mi.getOwnersType().isAssignableFrom( csrMi.getOwnersType() ) )
{
IParameterInfo[] csrParams = csrMi.getParameters();
IParameterInfo[] params = mi.getParameters();
for( int i = 0; i < csrParams.length; i++ )
{
IParameterInfo csrPi = csrParams[i];
IParameterInfo pi = params[i];
IRType csrDescriptor = IRTypeResolver.getDescriptor( csrPi.getFeatureType() );
IRType descriptor = IRTypeResolver.getDescriptor( pi.getFeatureType() );
if( !csrDescriptor.equals( descriptor ) )
{
break outer;
}
}
if( csrMi.isDefaultImpl() )
{
getClassStatement().addParseException( makeFullParserState(), Res.MSG_INHERITS_UNRELATED_DEFAULTS, gsClass.getName(), funcType, mi.getOwnersType().getName(), csrMi.getOwnersType().getName() );
}
else if( gsClass.isAbstract() ) // interface or abstract class
{
getClassStatement().addParseException( makeFullParserState(), Res.MSG_INHERITS_ABSTRACT_AND_DEFAULT, gsClass.getName(), funcType, mi.getOwnersType().getName(), csrMi.getOwnersType().getName() );
}
return true;
}
}
return false;
}
// class-members
// <class-member>
// <class-members> <class-member>
//
// class-member
// <function-definition>
// <var-statement>
//
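// In practice this method also handles property, delegate, inner class/interface/enum/structure/annotation
// declarations, and enum constants, e.g. (illustrative Gosu source only):
//
//   var _count : int
//   property set Count( n : int ) { _count = n }
//   delegate _impl represents IRunnable
//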
private void parseClassMembers( IGosuClassInternal gsClass )
{
if( isInnerClass( gsClass ) && !gsClass.isStatic() )
{
addOuterMember( gsClass );
}
ClassScopeCache scopeCache = makeClassScopeCache( gsClass );
parseEnumConstants( gsClass, scopeCache );
do
{
getOwner().checkInstruction( true );
while( match( null, ';' ) )
{
pushStatement( new NoOpStatement() );
}
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
ModifierInfo modifiers;
if( gsClass instanceof IGosuProgram )
{
int[] locations = new int[3];
modifiers = parseUntilMemberKeyword( null, false, locations );
iOffset = locations[0];
iLineNum = locations[1];
iColumn = locations[2];
}
else
{
// push static class symbols for annotations (they are part of modifier parsing)
pushClassSymbols( true, scopeCache );
try
{
modifiers = parseModifiers();
}
finally
{
popClassSymbols();
}
}
boolean bStatic = Modifier.isStatic( modifiers.getModifiers() );
if( gsClass.isInterface() )
{
modifiers.setModifiers( Modifier.setPublic( modifiers.getModifiers(), true ) );
}
boolean bDeprecated = isDeprecated( modifiers );
if( bDeprecated )
{
getOwner().pushIgnoreTypeDeprecation();
}
try
{
boolean bConstructSyntax = false;
Token T = new Token();
if( match( null, Keyword.KW_function ) ||
(bConstructSyntax = match( null, Keyword.KW_construct )) )
{
FunctionStatement functionStmt;
if( bConstructSyntax || isOldStyleConstructor( gsClass, T ) )
{
functionStmt = parseBaseConstructorDefinition( bConstructSyntax, modifiers.getAnnotations(), scopeCache );
verifyModifiers( functionStmt, modifiers, UsageTarget.ConstructorTarget );
}
else
{
pushClassSymbols( bStatic, scopeCache );
try
{
functionStmt = getOwner().parseBaseFunctionDefinition( null, false, false, modifiers );
if( gsClass.isInterface() && !bStatic )
{
eatOptionalSemiColon( true );
pushStatement( functionStmt );
}
verifyModifiers( functionStmt, modifiers, UsageTarget.MethodTarget );
}
finally
{
popClassSymbols();
}
}
DynamicFunctionSymbol dfs = functionStmt == null ? null : functionStmt.getDynamicFunctionSymbol();
if( dfs != null )
{
dfs.setClassMember( true );
if( dfs.getDisplayName().equals( gsClass.getRelativeName() ) )
{
gsClass.getParseInfo().addConstructorFunction(dfs);
}
else
{
gsClass.getParseInfo().addMemberFunction(dfs);
}
verifyTypeVarVariance( Variance.COVARIANT, functionStmt, dfs.getType() );
}
setLocation( iOffset, iLineNum, iColumn );
}
else if( match( null, Keyword.KW_property ) )
{
pushClassSymbols( bStatic, scopeCache );
try
{
Token t = new Token();
boolean bGetter = match( t, Keyword.KW_get );
boolean bSetter = !bGetter && match( null, Keyword.KW_set );
if( !bGetter && !bSetter )
{
getOwner().maybeEatNonDeclKeyword( false, t._strValue );
}
FunctionStatement functionStmt = getOwner().parseBaseFunctionDefinition( null, true, bGetter, modifiers );
verify( functionStmt, bGetter || bSetter, Res.MSG_EXPECTING_PROPERTY_GET_OR_SET_MODIFIER );
setLocation( iOffset, iLineNum, iColumn );
getOwner().popStatement();
DynamicFunctionSymbol dfs = functionStmt.getDynamicFunctionSymbol();
if( dfs != null )
{
IType returnType = functionStmt.getDynamicFunctionSymbol().getReturnType();
verify( functionStmt, bGetter || returnType == JavaTypes.pVOID(), Res.MSG_PROPERTY_SET_MUST_RETURN_VOID );
if( bGetter && dfs.getArgTypes() != null && dfs.getArgTypes().length > 0 )
{
List<IParameterDeclaration> parameters = functionStmt.getParameters();
for( IParameterDeclaration par : parameters )
{
par.addParseException( Res.MSG_GETTER_CANNOT_HAVE_PARAMETERS );
}
}
dfs.setClassMember( true );
DynamicPropertySymbol dps = getOrCreateDynamicPropertySymbol( functionStmt, gsClass, dfs, bGetter );
PropertyStatement stmt = new PropertyStatement( functionStmt, dps );
getOwner().pushStatement( stmt );
setLocation( iOffset, iLineNum, iColumn, true );
verifyPropertiesAreSymmetric( bGetter, dfs, dps, stmt );
dps.addMemberSymbols( gsClass );
if( bGetter )
{
verifyTypeVarVariance( Variance.COVARIANT, functionStmt, dps.getGetterDfs().getReturnType() );
}
else if( dps.getSetterDfs().getArgTypes().length > 0 )
{
verifyTypeVarVariance( Variance.CONTRAVARIANT, functionStmt, dps.getSetterDfs().getArgTypes()[0] );
}
}
verifyModifiers( functionStmt, modifiers, UsageTarget.PropertyTarget );
}
finally
{
popClassSymbols();
}
}
else if( match( null, Keyword.KW_var ) )
{
getOwner().pushParsingStaticMember( bStatic );
try
{
VarStatement varStmt = parseFieldDefn( gsClass, bStatic, scopeCache, modifiers );
verifyTypeVarVariance( Variance.INVARIANT, varStmt, varStmt.getType() );
setLocation( iOffset, iLineNum, iColumn );
removeInitializerIfInProgram( varStmt );
verifyModifiers( varStmt, modifiers, UsageTarget.PropertyTarget );
}
finally
{
getOwner().popParsingStaticMember();
}
}
else if( match( null, Keyword.KW_delegate ) )
{
DelegateStatement ds = parseDelegateDefn( gsClass, scopeCache, modifiers );
verifyModifiers( ds, modifiers, UsageTarget.PropertyTarget );
verifyTypeVarVariance( Variance.INVARIANT, ds, ds.getType() );
setLocation( iOffset, iLineNum, iColumn );
}
else if( match( T, Keyword.KW_class ) ||
match( T, Keyword.KW_interface ) ||
match( T, Keyword.KW_annotation ) ||
match( T, Keyword.KW_structure ) ||
match( T, Keyword.KW_enum ) )
{
// Pop the modifier list from the declaration phase, otherwise we'll have duplicates
popModifierList();
IGosuClassInternal inner = parseInnerClassDefinition( T );
if( inner != null )
{
inner.setAnnotations( modifiers.getAnnotations() );
if( inner.isInterface() )
{
modifiers.setModifiers( Modifier.setStatic( modifiers.getModifiers(), true ) );
ModifierInfo existingMI = (ModifierInfo)inner.getModifierInfo();
existingMI.addModifiers( modifiers.getModifiers() );
}
verifyModifiers( inner.getClassStatement(), modifiers, UsageTarget.TypeTarget );
}
}
else
{
// Pop the trailing modifier list, which doesn't correspond to any member
popModifierList();
if( !match( null, null, '}', true ) &&
!match( null, SourceCodeTokenizer.TT_EOF ) )
{
// Consume token first
boolean openBrace = false;
if( match( null, '{' ) )
{
openBrace = true;
}
else
{
getOwner().getTokenizer().nextToken();
}
NoOpStatement noop = new NoOpStatement();
verify( noop, false, Res.MSG_UNEXPECTED_TOKEN, getOwner().getTokenizer().getTokenAsString() );
pushStatement( noop );
setLocation( iOffset, iLineNum, iColumn );
if( openBrace )
{
eatBlock( '{', '}', false );
}
}
else
{
break;
}
}
}
finally
{
if( bDeprecated )
{
getOwner().popIgnoreTypeDeprecation();
}
}
} while( true );
}
private boolean isDeprecated( ModifierInfo modifiers )
{
List<IGosuAnnotation> annotations = modifiers.getAnnotations();
if( annotations != null )
{
for( IGosuAnnotation an : annotations )
{
if( an.getName().equalsIgnoreCase( "Deprecated" ) )
{
return true;
}
}
}
return false;
}
private boolean isOldStyleConstructor( IGosuClassInternal gsClass, Token t )
{
return match( t, null, SourceCodeTokenizer.TT_WORD, true ) &&
!gsClass.isInterface() &&
t._strValue.equals( gsClass.getRelativeName() );
}
private ClassScopeCache makeClassScopeCache( IGosuClassInternal gsClass )
{
// Copy the Static Scope so we can reuse it for each member
//
IScope staticScope;
Map<String, List<IFunctionSymbol>> staticDfsMap;
getSymbolTable().pushScope();
try
{
//getOwner().clearDfsDeclInSetByName();
getOwner().newDfsDeclInSetByName();
gsClass.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), true );
staticDfsMap = getOwner().getDfsDecls();
}
finally
{
staticScope = getSymbolTable().popScope();
}
// Copy the Non-Static Scope so we can reuse it for each member
//
IScope nonstaticScope;
Map<String, List<IFunctionSymbol>> nonstaticDfsMap;
getSymbolTable().pushScope();
try
{
getOwner().newDfsDeclInSetByName();
gsClass.putClassMembers( getOwner(), getSymbolTable(), getGosuClass(), false );
nonstaticDfsMap = getOwner().getDfsDecls();
getOwner().newDfsDeclInSetByName();
}
finally
{
nonstaticScope = getSymbolTable().popScope();
}
return new ClassScopeCache( staticScope, staticDfsMap, nonstaticScope, nonstaticDfsMap );
}
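// pushClassSymbols/popClassSymbols install and remove the cached static or non-static class scope and
// its corresponding dfs-decl map around the parsing of a single member.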
private void popClassSymbols()
{
getSymbolTable().popScope();
getOwner().popParsingStaticMember();
getOwner().newDfsDeclInSetByName();
}
private void pushClassSymbols( boolean bStatic, ClassScopeCache classScopeCache )
{
getOwner().setDfsDeclInSetByName( bStatic ? classScopeCache.getStaticDfsMap() : classScopeCache.getNonstaticDfsMap() );
getSymbolTable().pushScope( bStatic ? classScopeCache.getStaticScope() : classScopeCache.getNonstaticScope() );
getOwner().pushParsingStaticMember( bStatic );
}
private void removeInitializerIfInProgram( VarStatement varStmt )
{
if( !(getGosuClass() instanceof IGosuProgram) || getOwner().isEditorParser() )
{
return;
}
ParseTree location = varStmt.getLocation();
List<IParseTree> children = location.getChildren();
int iChildCount = children.size();
if( iChildCount > 3 )
{
if( iChildCount > 4 )
{
if( !(children.get( 3 ).getParsedElement() instanceof NameInDeclaration) ) // this is another NameInDeclaration for the Property name, which can be null if the name was not specified after the 'as' clause
{
throw new IllegalStateException( "Expecting children: 1 for NameInDeclaration, 1 for the type, (maybe another NameInDeclaration if an alias property declared), and 1 for the initializer" );
}
}
IParseTree initializerExpr = children.get( iChildCount -1 );
if( initializerExpr != null )
{
location.removeChild( initializerExpr );
}
}
}
private IGosuClassInternal parseInnerClassDefinition( Token t )
{
IGosuClassInternal enclosingGsClass = getClassStatement().getGosuClass();
verify( getClassStatement(), match( t, SourceCodeTokenizer.TT_WORD ), Res.MSG_EXPECTING_NAME_CLASS_DEF );
String strInnerClass = t._strValue;
if( strInnerClass != null )
{
for( IGosuClass c : enclosingGsClass.getKnownInnerClassesWithoutCompiling().values() )
{
IGosuClassInternal innerClass = (IGosuClassInternal)c;
if( innerClass.getRelativeName().equals( strInnerClass ) )
{
int i = 0;
String relativeName = innerClass.getName();
while( innerClass.isDefinitionsCompiled() )
{
// The inner class is already definition-compiled, maybe this is a duplicate inner class...
String duplicate = relativeName + "_duplicate_" + i++;
innerClass = (IGosuClassInternal)TypeSystem.getByFullNameIfValid( duplicate );
if( innerClass == null )
{
return null;
}
}
new GosuClassParser( getOwner(), innerClass ).parseDefinitions( innerClass );
return innerClass;
}
}
}
return null;
}
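// Parses the leading comma-separated enum constant list of an enum class body; parsing stops at the
// first ';' or at the first token that is not a valid constant name.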
private void parseEnumConstants( IGosuClassInternal gsClass,
ClassScopeCache scopeCache )
{
boolean bEnum = gsClass != null && gsClass.isEnum();
if( !bEnum )
{
return;
}
Set<String> constants = new HashSet<String>();
Token t = new Token();
boolean bConst;
do
{
bConst = false;
int iOffset = getTokenizer().getTokenStart();
int iLineNum = getTokenizer().getLineNumber();
int iColumn = getTokenizer().getTokenColumn();
if( match( t, null, SourceCodeTokenizer.TT_WORD, true ) &&
!Keyword.isKeyword( t._strValue ) &&
match( t, SourceCodeTokenizer.TT_WORD ) )
{
parseEnumConstant( t._strValue, scopeCache, constants.contains( t._strValue ) );
setLocation(iOffset, iLineNum, iColumn);
constants.add( t._strValue );
popStatement();
bConst = true;
}
if( match( null, ';' ) )
{
break;
}
} while( bConst && match( null, ',' ) );
}
private void parseEnumConstant( String strIdentifier, ClassScopeCache scopeCache, boolean bIsDuplicate )
{
IGosuClassInternal gsClass = getGosuClass();
VarStatement varStmt = gsClass.getStaticField( strIdentifier );
if( bIsDuplicate )
{
VarStatement dup = new VarStatement();
dup.setSymbol( varStmt.getSymbol() );
dup.setModifierInfo( varStmt.getModifierInfo() );
dup.setParent( varStmt.getParent() );
dup.setScope( varStmt.getScope() );
varStmt = dup;
}
pushClassSymbols( true, scopeCache );
try
{
getOwner().parseNewExpressionOrAnnotation( gsClass, false, !match( null, null, '(', true ), null, -1 );
Expression asExpr = popExpression();
varStmt.setAsExpression( asExpr );
if( asExpr.hasParseExceptions() )
{
for( IParseIssue pe : asExpr.getParseExceptions() )
{
varStmt.addParseException( pe );
//noinspection ThrowableResultOfMethodCallIgnored
asExpr.removeParseException( pe.getMessageKey() );
}
}
varStmt.setScriptPart( getOwner().getScriptPart() );
pushStatement( varStmt );
ISymbol symbol = varStmt.getSymbol();
symbol.setType( gsClass );
varStmt.setType( gsClass );
varStmt.setEnumConstant( true );
varStmt.setDefinitionParsed( true );
//noinspection unchecked
scopeCache.getNonstaticScope().put( varStmt.getSymbol().getName(), varStmt.getSymbol() );
gsClass.getParseInfo().addMemberField(varStmt);
}
finally
{
popClassSymbols();
}
}
private VarStatement parseFieldDefn( IGosuClassInternal gsClass, boolean bStatic, ClassScopeCache scopeCache, ModifierInfo modifiers )
{
if( gsClass.isInterface() )
{
bStatic = true;
}
Token t = new Token();
String strIdentifier = "";
boolean bHasName;
if( bHasName = match( t, SourceCodeTokenizer.TT_WORD ) )
{
strIdentifier = t._strValue;
}
else
{
t._strValue = null;
}
getOwner().maybeEatNonDeclKeyword( bHasName, strIdentifier );
VarStatement varStmt;
boolean bOuterLocalDefined = findLocalInOuters( strIdentifier ) != null;
if( !bStatic )
{
varStmt = findMemberField( gsClass, strIdentifier );
if( varStmt == null )
{
// It might not be in the non-static map if it is a scoped variable
varStmt = findStaticMemberField( gsClass, strIdentifier );
if( varStmt != null )
{
bStatic = true;
}
}
}
else
{
varStmt = findStaticMemberField( gsClass, strIdentifier );
}
verifyOrWarn( varStmt, !bOuterLocalDefined, false, Res.MSG_VARIABLE_ALREADY_DEFINED, strIdentifier );
if( !bStatic && varStmt != null && varStmt.isStatic() )
{
// Force static scope if the var is static. This is for scoped vars
bStatic = true;
}
pushClassSymbols( bStatic, scopeCache );
try
{
if( varStmt == null )
{
// This is for error conditions like vars appearing on enhancements
varStmt = new VarStatement();
getOwner().parseVarStatement( varStmt, t, false );
}
else
{
getOwner().parseVarStatement( varStmt, t, true );
}
if( bStatic )
{
//noinspection unchecked
scopeCache.getNonstaticScope().put( varStmt.getSymbol().getName(), varStmt.getSymbol() );
}
DynamicPropertySymbol dps = getOwner().parseVarPropertyClause( varStmt, varStmt.getIdentifierName(), varStmt.getType(), true );
if( dps != null )
{
verifyPropertiesAreSymmetric( true, dps.getGetterDfs(), dps, varStmt );
setStatic( bStatic, dps );
dps.addMemberSymbols( gsClass );
dps.updateAnnotations( modifiers.getAnnotations() );
}
// Consume optional trailing semi as part of the statement
match( null, ';' );
varStmt.getModifierInfo().setAnnotations( modifiers.getAnnotations() );
gsClass.getParseInfo().addMemberField(varStmt);
return varStmt;
}
finally
{
popClassSymbols();
}
}
private ISymbol findLocalInOuters( String strIdentifier )
{
if( (isParsingBlock() || getParsingAnonymousClass() != null) && !getOwner().isParsingAnnotation() )
{
return captureSymbol( getCurrentEnclosingGosuClass(), strIdentifier, null );
}
return null;
}
private VarStatement findMemberField( IGosuClassInternal gsClass, String name )
{
gsClass.compileDeclarationsIfNeeded();
return assignPossibleDuplicateField( name, gsClass.getParseInfo().getMemberFields() );
}
private VarStatement findStaticMemberField( IGosuClassInternal gsClass, String name )
{
gsClass.compileDeclarationsIfNeeded();
return assignPossibleDuplicateField( name, gsClass.getParseInfo().getStaticFields() );
}
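// When the named field's declaration has already been consumed by a definition parse, looks for the
// lowest-indexed, not-yet-parsed "<n>_duplicate_<name>" entry so that duplicate fields each get their
// own definition pass.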
private VarStatement assignPossibleDuplicateField( String name, Map<String, VarStatement> fields )
{
VarStatement varStmt = fields.get( name );
varStmt = assignPossibleDuplicateField( name, varStmt, fields );
return varStmt;
}
VarStatement assignPossibleDuplicateField( String name, VarStatement varStmt, Map<String, VarStatement> map )
{
VarStatement result = varStmt;
if( varStmt == null || varStmt.isDefinitionParsed() )
{
int iMin = Integer.MAX_VALUE;
for( String nameCsr : map.keySet() )
{
String strName = nameCsr;
if( strName.toLowerCase().contains( "_duplicate_" + name.toLowerCase() ) )
{
VarStatement stmtCsr = map.get( nameCsr );
if( !stmtCsr.isDefinitionParsed() )
{
int iIndex = Integer.parseInt( strName.substring( 0, strName.indexOf( '_' ) ) );
if( iIndex < iMin )
{
iMin = iIndex;
result = stmtCsr;
}
}
}
}
}
return result;
}
private DelegateStatement parseDelegateDefn( IGosuClassInternal gsClass, ClassScopeCache scopeCache, ModifierInfo modifiers )
{
Token t = new Token();
int iNameOffset = getTokenizer().getTokenStart();
boolean bHasName = match( t, SourceCodeTokenizer.TT_WORD );
String strIdentifier = t._strValue == null ? "" : t._strValue;
getOwner().maybeEatNonDeclKeyword( bHasName, strIdentifier );
String insensitiveIdentifier = strIdentifier;
VarStatement varStmt = gsClass.getMemberField( insensitiveIdentifier );
if( varStmt != null )
{
varStmt.setNameOffset( iNameOffset, strIdentifier );
}
pushClassSymbols( false, scopeCache );
try
{
// Need to ensure that the varStmt is indeed a delegate statement, because it might be a conflicting var stmt
DelegateStatement delStmt;
if( varStmt instanceof DelegateStatement )
{
delStmt = (DelegateStatement)varStmt;
}
else
{
delStmt = new DelegateStatement();
delStmt.setModifierInfo( modifiers );
}
if( varStmt == null )
{
// This is for error conditions like delegates appearing on enhancements
varStmt = new DelegateStatement();
varStmt.setModifierInfo( modifiers );
varStmt.setSymbol( new Symbol( strIdentifier, JavaTypes.OBJECT(), null ) );
verify( delStmt, !Modifier.isStatic( modifiers.getModifiers() ), Res.MSG_DELEGATES_CANNOT_BE_STATIC );
getOwner().parseDelegateStatement( delStmt, strIdentifier );
}
else
{
getOwner().parseDelegateStatement( delStmt, strIdentifier );
}
gsClass.getParseInfo().addMemberField(varStmt);
return delStmt;
}
finally
{
popClassSymbols();
}
}
DynamicPropertySymbol getOrCreateDynamicPropertySymbol(
ParsedElement parsedElement, ICompilableTypeInternal gsClass, DynamicFunctionSymbol dfs, boolean bGetter )
{
String strPropertyName = dfs.getDisplayName().substring( 1 );
ISymbol symbol = getSymbolTable().getSymbol( strPropertyName );
if( symbol != null && !dfs.getDisplayName().contains( symbol.getDisplayName() ) )
{
// Force case sensitivity, mainly to make overrides consistent
symbol = null;
}
DynamicPropertySymbol dps;
if( !(gsClass instanceof IGosuClass && ((IGosuClass)gsClass).isCompilingDefinitions()) &&
!verify( parsedElement, symbol == null || symbol instanceof DynamicPropertySymbol, Res.MSG_VARIABLE_ALREADY_DEFINED, strPropertyName ) )
{
return new DynamicPropertySymbol( dfs, bGetter );
}
if( symbol == null ||
(gsClass != null &&
gsClass.getMemberProperty( strPropertyName ) == null &&
gsClass.getStaticProperty( strPropertyName ) == null) )
{
dps = new DynamicPropertySymbol( dfs, bGetter );
dps.setClassMember( true );
if( symbol != null )
{
assert symbol instanceof DynamicPropertySymbol;
dps.setParent( (DynamicPropertySymbol)symbol );
}
return dps;
}
else if( !(symbol instanceof DynamicPropertySymbol) )
{
// Error already applied from declaration phase
return new DynamicPropertySymbol( dfs, bGetter );
}
assert symbol instanceof DynamicPropertySymbol;
dps = (DynamicPropertySymbol)symbol;
if( bGetter )
{
verify( parsedElement,
strPropertyName.equals( Keyword.KW_outer.getName() ) ||
dps.getImmediateGetterDfs() == null ||
dps.getImmediateGetterDfs() instanceof VarPropertyGetFunctionSymbol ||
dps.getImmediateGetterDfs().getValueDirectly() != null ||
dps.getImmediateGetterDfs() == dfs ||
(dps.getImmediateGetterDfs().isAbstract() && !dfs.isAbstract()) ||
(gsClass != null && gsClass.isInterface()),
Res.MSG_GETTER_FOR_PROPERTY_ALREADY_DEFINED,
strPropertyName );
if( parsedElement.hasParseException( Res.MSG_FUNCTION_ALREADY_DEFINED ) &&
parsedElement.hasParseException( Res.MSG_GETTER_FOR_PROPERTY_ALREADY_DEFINED ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
parsedElement.removeParseException( Res.MSG_FUNCTION_ALREADY_DEFINED );
}
dps.setGetterDfs( dfs );
}
else
{
verify( parsedElement,
dps.getImmediateSetterDfs() == null ||
dps.getImmediateSetterDfs() instanceof VarPropertySetFunctionSymbol ||
dps.getImmediateSetterDfs().getValueDirectly() != null ||
dps.getImmediateSetterDfs() == dfs ||
(dps.getImmediateSetterDfs().isAbstract() && !dfs.isAbstract()) ||
(gsClass != null && gsClass.isInterface()),
Res.MSG_SETTER_FOR_PROPERTY_ALREADY_DEFINED,
strPropertyName );
if( parsedElement.hasParseException( Res.MSG_FUNCTION_ALREADY_DEFINED ) &&
parsedElement.hasParseException( Res.MSG_SETTER_FOR_PROPERTY_ALREADY_DEFINED ) )
{
//noinspection ThrowableResultOfMethodCallIgnored
parsedElement.removeParseException( Res.MSG_FUNCTION_ALREADY_DEFINED );
}
dps.setSetterDfs( dfs );
}
return dps;
}
@SuppressWarnings({"ConstantConditions"})
private FunctionStatement parseBaseConstructorDefinition( boolean bConstructor, List<IGosuAnnotation> defnAnnotations, ClassScopeCache scopeCache )
{
final IGosuClassInternal gsClass = getGosuClass();
Token T = new Token();
getSymbolTable().pushScope();
try
{
String strFunctionName;
if( bConstructor )
{
strFunctionName = gsClass.getRelativeName();
}
else
{
match( T, SourceCodeTokenizer.TT_WORD );
strFunctionName = T._strValue;
}
// String strNameInSource = T._strValue == null ? "" : T._strValue;
// getOwner().addNameInDeclaration( strFunctionName, iOffsetName-9, iLineName, iColumnName, true );
// Since we're going with a two-pass approach the symbols will already be in the table, but w/o values.
// So we don't want to check for already-defined functions here -- we're going to overwrite them with
// these identical symbols, but with values.
//verify( _symTable.getSymbol( strFunctionName ) == null, strFunctionName + Res.MSG_VARIABLE_ALREADY_DEFINED ) );
match( null, '(' );
List<ISymbol> args;
IType[] argTypes;
FunctionStatement functionStmt = new ConstructorStatement( bConstructor );
int iOffsetParamList = getTokenizer().getTokenStart();
int iColumnParamList = getTokenizer().getTokenColumn();
int iLineParamList = getTokenizer().getLineNumber();
if( !match( null, null, ')', true ) )
{
pushClassSymbols( false, scopeCache );
try
{
args = getOwner().parseParameterDeclarationList( functionStmt, false, null );
}
finally
{
popClassSymbols();
}
argTypes = new IType[args.size()];
for( int i = 0; i < args.size(); i++ )
{
getSymbolTable().putSymbol( args.get( i ) );
argTypes[i] = args.get( i ).getType();
}
}
else
{
argTypes = IType.EMPTY_ARRAY;
args = Collections.emptyList();
pushExpression( new ParameterListClause() );
setLocation( iOffsetParamList, iLineParamList, iColumnParamList, getTokenizer().getTokenStart() <= iOffsetParamList, true );
popExpression();
}
match( null, ')' );
if( match( null, ":", SourceCodeTokenizer.TT_OPERATOR ) )
{
getOwner().parseTypeLiteral();
Expression expression = popExpression();
verify( expression, false, Res.MSG_NO_TYPE_AFTER_CONSTRUCTOR );
}
StatementList stmtList;
int iOffset = getOwner().getTokenizer().getTokenStart();
int iLineNum = getOwner().getTokenizer().getLineNumber();
int iColumn = getOwner().getTokenizer().getTokenColumn();
FunctionType ft = new FunctionType( gsClass.getRelativeName(), gsClass, argTypes );
ft.setScriptPart( getOwner().getScriptPart() );
getOwner().pushParsingFunction( ft );
DynamicFunctionSymbol dfsDecl = findConstructorFunction( gsClass, DynamicFunctionSymbol.getSignatureName( strFunctionName, args ) );
dfsDecl = (dfsDecl == null || dfsDecl.getType() == GosuTypes.DEF_CTOR_TYPE()) ? null : dfsDecl;
functionStmt = dfsDecl == null ? functionStmt : dfsDecl.getDeclFunctionStmt();
verify( functionStmt, dfsDecl != null, Res.MSG_EXPECTING_NAME_FUNCTION_DEF );
if( verify( functionStmt, match( null, '{' ), Res.MSG_EXPECTING_OPEN_BRACE_FOR_CONSTRUCTOR_DEF ) )
{
IGosuClassInternal superClass = gsClass.getSuperClass();
if( superClass != null )
{
if( gsClass.isAnonymous() )
{
List<? extends IConstructorInfo> declaredConstructors = gsClass.getTypeInfo().getDeclaredConstructors();
if( verifyCallSiteCtorImpled( functionStmt, declaredConstructors ) )
{
verify( functionStmt, declaredConstructors.size() <= 1, Res.MSG_SINGLE_ANON_CTOR );
}
}
// If it's an enum, there's no default super constructor: the enum class extends the Enum java class
// which requires a String and an int. Those arguments are automatically generated by the compiler.
if( gsClass.getSupertype().getGenericType() != JavaTypes.ENUM() )
{
DynamicFunctionSymbol superDefaultConstructor = superClass.getDefaultConstructor();
verify( functionStmt,
match( T, Keyword.KW_super, true ) ||
match( T, Keyword.KW_this, true ) ||
(superDefaultConstructor != null && superClass.isAccessible( getGosuClass(), superDefaultConstructor )),
Res.MSG_NO_DEFAULT_CTOR_IN, superClass.getName() );
}
}
else if( gsClass.isAnonymous() ) // anon on interface
{
if( verify( functionStmt, gsClass.getTypeInfo().getDeclaredConstructors().size() <= 1, Res.MSG_SINGLE_ANON_CTOR ) )
{
verify( functionStmt, argTypes.length == 0, Res.MSG_ANON_CTOR_PARAMS_CONFLICT_WITH_CALL_SITE );
}
}
// No need to push an isolated scope here because there are no indexed
// symbols involved. This scope is only used to resolve relative constructor
// calls from within a constructor e.g., this( foo ), super( foo ), etc.
boolean bMoreStatements = true;
MethodCallStatement initializer = null;
boolean bSuperOrThisCall = (match( T, Keyword.KW_super, true ) || match( T, Keyword.KW_this, true )) && getTokenizer().lookaheadType( 1, true ) == '(';
if( bSuperOrThisCall )
{
// Has to be static scope here since the JVM verifier prevents explicitly passing 'this' to super()
pushClassSymbols( true, scopeCache );
try
{
putSuperAndThisConstructorSymbols();
// Push static class members in case they are referenced as args in super( xxx ) or this( xxx )
bMoreStatements = getOwner().parseStatement();
initializer = (MethodCallStatement)popStatement();
}
finally
{
popClassSymbols();
}
}
else if( superClass != null )
{
MethodCallExpression e = new MethodCallExpression();
e.setParent( getClassStatement() );
DynamicFunctionSymbol defaultSuperConstructor;
// Enums implicitly call a super function that takes a String and an int, not a no-arg method
if( gsClass.getSupertype().getGenericType() == JavaTypes.ENUM() )
{
defaultSuperConstructor = superClass.getConstructorFunction( "Enum(java.lang.String, int)" );
}
else
{
defaultSuperConstructor = superClass.getDefaultConstructor();
}
if( defaultSuperConstructor != null )
{
e.setFunctionSymbol( new SuperConstructorFunctionSymbol( defaultSuperConstructor ) );
e.setArgs( null );
e.setType( GosuParserTypes.NULL_TYPE() );
initializer = new MethodCallStatement();
initializer.setMethodCall( e );
e.setParent( initializer );
initializer.setParent( functionStmt );
}
}
else
{
MethodCallExpression e = new MethodCallExpression();
e.setParent( getClassStatement() );
e.setFunctionSymbol( new InitConstructorFunctionSymbol( getSymbolTable() ) );
e.setArgs( null );
e.setType( GosuParserTypes.NULL_TYPE() );
initializer = new MethodCallStatement();
initializer.setMethodCall( e );
e.setParent( initializer );
initializer.setParent( functionStmt );
}
ArrayList<Statement> statements = new ArrayList<Statement>( 8 );
if( bMoreStatements )
{
pushClassSymbols( false, scopeCache );
getOwner().pushParsingAbstractConstructor( getClassStatement().getGosuClass().isAbstract() );
getSymbolTable().pushScope();
try
{
getSymbolTable().putSymbol( new Symbol( Keyword.KW_this.getName(), TypeLord.getConcreteType( gsClass ), getSymbolTable(), null ) );
getSymbolTable().putSymbol( new Symbol( Keyword.KW_super.getName(),
superClass == null ? IGosuClassInternal.Util.getGosuClassFrom( JavaTypes.OBJECT() ) :
superClass, getSymbolTable(), null ) );
getOwner().parseStatementsAndDetectUnreachable( statements );
}
finally
{
getSymbolTable().popScope();
getOwner().popParsingAbstractConstructor();
popClassSymbols();
}
}
verify( functionStmt, match( null, '}' ), Res.MSG_EXPECTING_CLOSE_BRACE_FOR_CONSTRUCTOR_DEF );
stmtList = new StatementList( getSymbolTable() );
stmtList.setStatements( statements );
Statement statement = isDontOptimizeStatementLists() ? stmtList : stmtList.getSelfOrSingleStatement();
if( statement == stmtList )
{
pushStatement( statement );
setLocation( iOffset, iLineNum, iColumn );
popStatement();
}
if( dfsDecl != null )
{
dfsDecl.setArgs( args );
dfsDecl.setValueDirectly( statement );
dfsDecl.setInitializer( initializer );
dfsDecl.getModifierInfo().setAnnotations( defnAnnotations );
}
}
else
{
eatStatementBlock( functionStmt, Res.MSG_EXPECTING_OPEN_BRACE_FOR_FUNCTION_DEF );
NotAStatement nas = new NotAStatement();
pushStatement( nas );
setLocation( iOffset, iLineNum, iColumn );
popStatement();
if( dfsDecl != null )
{
dfsDecl.setArgs( args );
dfsDecl.setValueDirectly( nas );
dfsDecl.getModifierInfo().setAnnotations( defnAnnotations );
}
}
getOwner().pushDynamicFunctionSymbol( dfsDecl );
if( functionStmt != null )
{
functionStmt.setDynamicFunctionSymbol( dfsDecl );
pushStatement( functionStmt );
}
return functionStmt;
}
finally
{
getSymbolTable().popScope();
if( getOwner().isParsingFunction() )
{
getOwner().popParsingFunction();
}
}
}
private boolean verifyCallSiteCtorImpled( FunctionStatement functionStmt, List<? extends IConstructorInfo> declaredConstructors )
{
if( declaredConstructors.size() != 2 )
{
return true;
}
for( IConstructorInfo ctor: declaredConstructors )
{
if( ctor instanceof GosuConstructorInfo )
{
if( !verify( functionStmt, !((GosuConstructorInfo)ctor).getDfs().getType().getName().equals( GosuTypes.DEF_CTOR_TYPE().getName() ), Res.MSG_ANON_CTOR_PARAMS_CONFLICT_WITH_CALL_SITE ) )
{
// The ctor from the call site is on super, but not impled by this ctor, therefore it implements the wrong one
return false;
}
}
}
return true;
}
private DynamicFunctionSymbol findConstructorFunction( IGosuClassInternal gsClass, String signatureName )
{
gsClass.compileDeclarationsIfNeeded();
DynamicFunctionSymbol dfs = gsClass.getParseInfo().getConstructorFunctions().get( signatureName );
if( dfs != null && dfs.getValueDirectly() != null )
{
dfs = GosuParser.assignPossibleDuplicateDfs( dfs, gsClass.getParseInfo().getConstructorFunctions().values() );
}
return dfs;
}
/**
* Alias super's ctors and this class's ctors as super(xxx) and this(xxx).
*/
private void putSuperAndThisConstructorSymbols()
{
IGosuClassInternal thisClass = getGosuClass();
IGosuClassInternal superClass = thisClass.getSuperClass();
if( superClass != null )
{
for( DynamicFunctionSymbol dfs : superClass.getConstructorFunctions() )
{
if( superClass.isAccessible( getGosuClass(), dfs ) )
{
dfs = new SuperConstructorFunctionSymbol( superClass.isParameterizedType()
? dfs.getParameterizedVersion( superClass )
: dfs );
getSymbolTable().putSymbol( dfs );
getOwner().putDfsDeclInSetByName( dfs );
}
}
}
for( DynamicFunctionSymbol dfs : thisClass.getConstructorFunctions() )
{
dfs = new ThisConstructorFunctionSymbol( dfs );
getSymbolTable().putSymbol( dfs );
getOwner().putDfsDeclInSetByName( dfs );
}
}
private boolean isCyclicInheritance( IType superType, IGosuClassInternal gsClass )
{
if( TypeLord.getPureGenericType( superType ) == gsClass )
{
return true;
}
if( superType != null && superType instanceof IGosuClassInternal )
{
if( isCyclicInheritance( ((IGosuClassInternal)superType).getSuperClass(), gsClass ) )
{
return true;
}
if( isCyclicInheritance( ((IGosuClassInternal)superType).getEnclosingType(), gsClass ) )
{
return true;
}
}
return superType instanceof IGosuClassInternal &&
isCyclicInterfaceInheritance( (IGosuClassInternal)superType, gsClass );
}
private boolean isCyclicInterfaceInheritance( IGosuClassInternal gsExtendee, IGosuClass gsExtendor )
{
if( gsExtendee == gsExtendor )
{
return true;
}
IType[] interfaces = gsExtendee.getInterfaces();
for( int i = 0; i < interfaces.length; i++ )
{
IType type = interfaces[i];
if( type instanceof ErrorType )
{
return false;
}
IGosuClassInternal gsClass = IGosuClassInternal.Util.getGosuClassFrom( type );
if( isCyclicInterfaceInheritance( gsClass, gsExtendor ) )
{
return true;
}
}
return false;
}
@Override
IGosuClassInternal getGosuClass()
{
return (IGosuClassInternal)super.getGosuClass();
}
@Override
public String toString()
{
IGosuClassInternal gosuClass = getGosuClass();
return "Parsing Class: " + (gosuClass == null ? "null" : gosuClass.getName());
}
}
| fix npe
| gosu-core/src/main/java/gw/internal/gosu/parser/GosuClassParser.java | fix npe |
|
Java | apache-2.0 | 9052e638079218e55782672394040f0fbcf5b203 | 0 | anchela/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr;
import org.apache.jackrabbit.commons.AbstractSession;
import org.apache.jackrabbit.mk.model.NodeState;
import org.apache.jackrabbit.mk.model.NodeStateEditor;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.Connection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.ContentHandler;
import javax.jcr.Credentials;
import javax.jcr.InvalidItemStateException;
import javax.jcr.Node;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ValueFactory;
import javax.jcr.Workspace;
import javax.jcr.retention.RetentionManager;
import javax.jcr.security.AccessControlManager;
import java.io.IOException;
import java.security.AccessControlException;
/**
* {@code SessionImpl}...
*/
public class SessionImpl extends AbstractSession {
/**
* logger instance
*/
private static final Logger log = LoggerFactory.getLogger(SessionImpl.class);
private final Repository repository;
private final Workspace workspace;
private final Connection connection;
private final ValueFactory valueFactory;
private final GlobalContext globalContext;
private final SessionContext<SessionImpl> sessionContext = new Context();
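// Editor holding this session's transient changes; replaced with a fresh editor after each successful save.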
private NodeStateEditor editor;
private boolean isAlive = true;
SessionImpl(GlobalContext globalContext, Repository repository, Connection connection) {
this.globalContext = globalContext;
this.repository = repository;
this.connection = connection;
editor = connection.getNodeStateEditor(connection.getCurrentRoot());
valueFactory = new ValueFactoryImpl();
workspace = new WorkspaceImpl(sessionContext);
}
//------------------------------------------------------------< Session >---
@Override
public Repository getRepository() {
return repository;
}
@Override
public String getUserID() {
return connection.getAuthInfo().getUserID();
}
@Override
public String[] getAttributeNames() {
return connection.getAuthInfo().getAttributeNames();
}
@Override
public Object getAttribute(String name) {
return connection.getAuthInfo().getAttribute(name);
}
@Override
public Workspace getWorkspace() {
return workspace;
}
/**
* @see javax.jcr.Session#impersonate(Credentials)
*/
@Override
public Session impersonate(Credentials credentials) throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
@Override
public ValueFactory getValueFactory() throws RepositoryException {
checkIsAlive();
return sessionContext.getValueFactory();
}
//------------------------------------------------------------< Reading >---
@Override
public Node getRootNode() throws RepositoryException {
checkIsAlive();
return new NodeImpl(sessionContext, editor);
}
@Override
public Node getNodeByUUID(String uuid) throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
@Override
public Node getNodeByIdentifier(String id) throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
//------------------------------------------------------------< Writing >---
@Override
public void move(String srcAbsPath, String destAbsPath) throws RepositoryException {
checkIsAlive();
// todo implement move
}
//------------------------------------------------------------< state >---
@Override
public void save() throws RepositoryException {
checkIsAlive();
try {
NodeState newState = connection.commit(editor);
editor = connection.getNodeStateEditor(newState);
} catch (CommitFailedException e) {
throw new RepositoryException(e);
}
}
@Override
public void refresh(boolean keepChanges) throws RepositoryException {
checkIsAlive();
try {
connection.commit(connection.getNodeStateEditor(connection.getCurrentRoot())); // todo: need a better way to update a connection to head
} catch (CommitFailedException e) {
throw new RepositoryException(e);
}
}
@Override
public boolean hasPendingChanges() throws RepositoryException {
checkIsAlive();
// todo implement hasPendingChanges
return false;
}
//----------------------------------------------------------< Lifecycle >---
@Override
public boolean isLive() {
return isAlive;
}
@Override
public void logout() {
if (!isAlive) {
// ignore
return;
}
isAlive = false;
// TODO
try {
connection.close();
} catch (IOException e) {
log.warn("Error while closing connection", e);
}
}
//----------------------------------------------------< Import / Export >---
@Override
public ContentHandler getImportContentHandler(String parentAbsPath, int uuidBehavior) throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
//------------------------------------------------------------< Locking >---
/**
* @see javax.jcr.Session#addLockToken(String)
*/
@Override
public void addLockToken(String lt) {
try {
getWorkspace().getLockManager().addLockToken(lt);
} catch (RepositoryException e) {
log.warn("Unable to add lock token '{}' to this session: {}", lt, e.getMessage());
}
}
/**
* @see javax.jcr.Session#getLockTokens()
*/
@Override
public String[] getLockTokens() {
try {
return getWorkspace().getLockManager().getLockTokens();
} catch (RepositoryException e) {
log.warn("Unable to retrieve lock tokens for this session: {}", e.getMessage());
return new String[0];
}
}
/**
* @see javax.jcr.Session#removeLockToken(String)
*/
@Override
public void removeLockToken(String lt) {
try {
getWorkspace().getLockManager().removeLockToken(lt);
} catch (RepositoryException e) {
log.warn("Unable to remove lock token '{}' from this session: {}", lt, e.getMessage());
}
}
//------------------------------------------------------< AccessControl >---
@Override
public boolean hasPermission(String absPath, String actions) throws RepositoryException {
checkIsAlive();
// TODO
return false;
}
/**
* @see javax.jcr.Session#checkPermission(String, String)
*/
@Override
public void checkPermission(String absPath, String actions) throws AccessControlException, RepositoryException {
if (!hasPermission(absPath, actions)) {
throw new AccessControlException("Access control violation: path = " + absPath + ", actions = " + actions);
}
}
@Override
public boolean hasCapability(String methodName, Object target, Object[] arguments) throws RepositoryException {
checkIsAlive();
// TODO
return false;
}
@Override
public AccessControlManager getAccessControlManager() throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
//----------------------------------------------------------< Retention >---
@Override
public RetentionManager getRetentionManager() throws RepositoryException {
throw new UnsupportedRepositoryOperationException("Retention Management is not supported.");
}
//--------------------------------------------------------------------------
/**
* Performs a sanity check on this session.
*
* @throws RepositoryException if this session has been rendered invalid
* for some reason (e.g. if this session has been closed explicitly by logout)
*/
void checkIsAlive() throws RepositoryException {
// check session status
if (!isAlive) {
throw new RepositoryException("This session has been closed.");
}
}
/**
* Returns true if the repository supports the given option. False otherwise.
*
* @param option Any of the option constants defined by {@link Repository}
* that either returns 'true' or 'false'. I.e.
* <ul>
* <li>{@link Repository#LEVEL_1_SUPPORTED}</li>
* <li>{@link Repository#LEVEL_2_SUPPORTED}</li>
* <li>{@link Repository#OPTION_ACCESS_CONTROL_SUPPORTED}</li>
* <li>{@link Repository#OPTION_ACTIVITIES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_BASELINES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_JOURNALED_OBSERVATION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_LIFECYCLE_SUPPORTED}</li>
* <li>{@link Repository#OPTION_LOCKING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_NODE_AND_PROPERTY_WITH_SAME_NAME_SUPPORTED}</li>
* <li>{@link Repository#OPTION_NODE_TYPE_MANAGEMENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_OBSERVATION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_QUERY_SQL_SUPPORTED}</li>
* <li>{@link Repository#OPTION_RETENTION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_SHAREABLE_NODES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_SIMPLE_VERSIONING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_TRANSACTIONS_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UNFILED_CONTENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UPDATE_MIXIN_NODE_TYPES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UPDATE_PRIMARY_NODE_TYPE_SUPPORTED}</li>
* <li>{@link Repository#OPTION_VERSIONING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_WORKSPACE_MANAGEMENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_XML_EXPORT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_XML_IMPORT_SUPPORTED}</li>
* <li>{@link Repository#WRITE_SUPPORTED}</li>
* </ul>
* @return true if the repository supports the given option. False otherwise.
*/
boolean isSupportedOption(String option) {
String desc = getRepository().getDescriptor(option);
// if the descriptors are not available return true. the missing
// functionality of the given SPI impl will in this case be detected
// upon the corresponding SPI call (see JCR-3143).
return (desc == null) ? true : Boolean.valueOf(desc);
}
/**
* Make sure the repository supports the option indicated by the given string.
*
* @param option Any of the option constants defined by {@link Repository}
* that either returns 'true' or 'false'. I.e.
* <ul>
* <li>{@link Repository#LEVEL_1_SUPPORTED}</li>
* <li>{@link Repository#LEVEL_2_SUPPORTED}</li>
* <li>{@link Repository#OPTION_ACCESS_CONTROL_SUPPORTED}</li>
* <li>{@link Repository#OPTION_ACTIVITIES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_BASELINES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_JOURNALED_OBSERVATION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_LIFECYCLE_SUPPORTED}</li>
* <li>{@link Repository#OPTION_LOCKING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_NODE_AND_PROPERTY_WITH_SAME_NAME_SUPPORTED}</li>
* <li>{@link Repository#OPTION_NODE_TYPE_MANAGEMENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_OBSERVATION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_QUERY_SQL_SUPPORTED}</li>
* <li>{@link Repository#OPTION_RETENTION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_SHAREABLE_NODES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_SIMPLE_VERSIONING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_TRANSACTIONS_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UNFILED_CONTENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UPDATE_MIXIN_NODE_TYPES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UPDATE_PRIMARY_NODE_TYPE_SUPPORTED}</li>
* <li>{@link Repository#OPTION_VERSIONING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_WORKSPACE_MANAGEMENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_XML_EXPORT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_XML_IMPORT_SUPPORTED}</li>
* <li>{@link Repository#WRITE_SUPPORTED}</li>
* </ul>
* @throws UnsupportedRepositoryOperationException If the given option is
* not supported.
* @throws RepositoryException If another error occurs.
* @see javax.jcr.Repository#getDescriptorKeys()
*/
void checkSupportedOption(String option) throws RepositoryException {
if (!isSupportedOption(option)) {
throw new UnsupportedRepositoryOperationException(option + " is not supported by this repository.");
}
}
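// Illustrative sketch (assumed usage, not an existing call site in this class): a write path would
// typically combine the two guards defined in this class before touching repository state, e.g.
//   checkIsAlive(); // fails once logout() has been called
//   checkSupportedOption(Repository.OPTION_LOCKING_SUPPORTED); // fails on repositories without locking
// Repository.OPTION_LOCKING_SUPPORTED is one of the descriptor keys enumerated in the javadoc above.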
/**
* Checks if this session has pending changes.
*
* @throws InvalidItemStateException if this session has pending changes
* @throws RepositoryException if another error occurs
*/
void checkHasPendingChanges() throws RepositoryException {
// check for pending changes
if (hasPendingChanges()) {
String msg = "Unable to perform operation. Session has pending changes.";
log.debug(msg);
throw new InvalidItemStateException(msg);
}
}
//--------------------------------------------------------------------------
private class Context implements SessionContext<SessionImpl> {
@Override
public SessionImpl getSession() {
return SessionImpl.this;
}
@Override
public GlobalContext getGlobalContext() {
return globalContext;
}
@Override
public String getWorkspaceName() {
return connection.getWorkspaceName();
}
@Override
public Connection getConnection() {
return connection;
}
@Override
public ValueFactory getValueFactory() {
return valueFactory;
}
}
} | oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/SessionImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr;
import org.apache.jackrabbit.commons.AbstractSession;
import org.apache.jackrabbit.mk.model.NodeState;
import org.apache.jackrabbit.mk.model.NodeStateEditor;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.Connection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.ContentHandler;
import javax.jcr.Credentials;
import javax.jcr.InvalidItemStateException;
import javax.jcr.Node;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ValueFactory;
import javax.jcr.Workspace;
import javax.jcr.retention.RetentionManager;
import javax.jcr.security.AccessControlManager;
import java.io.IOException;
import java.security.AccessControlException;
/**
* {@code SessionImpl}...
*/
public class SessionImpl extends AbstractSession {
/**
* logger instance
*/
private static final Logger log = LoggerFactory.getLogger(SessionImpl.class);
private final Repository repository;
private final Workspace workspace;
private final Connection connection;
private final ValueFactory valueFactory;
private final GlobalContext globalContext;
private NodeStateEditor editor;
private boolean isAlive = true;
private final SessionContext<SessionImpl> sessionContext = new Context();
SessionImpl(GlobalContext globalContext, Repository repository, Connection connection) {
this.globalContext = globalContext;
this.repository = repository;
this.connection = connection;
editor = connection.getNodeStateEditor(connection.getCurrentRoot());
valueFactory = new ValueFactoryImpl();
workspace = new WorkspaceImpl(sessionContext);
}
//------------------------------------------------------------< Session >---
@Override
public Repository getRepository() {
return repository;
}
@Override
public String getUserID() {
return connection.getAuthInfo().getUserID();
}
@Override
public String[] getAttributeNames() {
return connection.getAuthInfo().getAttributeNames();
}
@Override
public Object getAttribute(String name) {
return connection.getAuthInfo().getAttribute(name);
}
@Override
public Workspace getWorkspace() {
return workspace;
}
/**
* @see javax.jcr.Session#impersonate(Credentials)
*/
@Override
public Session impersonate(Credentials credentials) throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
@Override
public ValueFactory getValueFactory() throws RepositoryException {
checkIsAlive();
return sessionContext.getValueFactory();
}
//------------------------------------------------------------< Reading >---
@Override
public Node getRootNode() throws RepositoryException {
checkIsAlive();
return new NodeImpl(sessionContext, editor);
}
@Override
public Node getNodeByUUID(String uuid) throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
@Override
public Node getNodeByIdentifier(String id) throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
//------------------------------------------------------------< Writing >---
@Override
public void move(String srcAbsPath, String destAbsPath) throws RepositoryException {
checkIsAlive();
// todo implement move
}
//------------------------------------------------------------< state >---
@Override
public void save() throws RepositoryException {
checkIsAlive();
try {
NodeState newState = connection.commit(editor);
editor = connection.getNodeStateEditor(newState);
}
catch (CommitFailedException e) {
throw new RepositoryException(e);
}
}
@Override
public void refresh(boolean keepChanges) throws RepositoryException {
checkIsAlive();
try {
connection.commit(connection.getNodeStateEditor(connection.getCurrentRoot())); // todo: need a better way to update a connection to head
}
catch (CommitFailedException e) {
throw new RepositoryException(e);
}
}
@Override
public boolean hasPendingChanges() throws RepositoryException {
checkIsAlive();
// todo implement hasPendingChanges
return false;
}
//----------------------------------------------------------< Lifecycle >---
@Override
public boolean isLive() {
return isAlive;
}
@Override
public void logout() {
if (!isAlive) {
// ignore
return;
}
isAlive = false;
// TODO
try {
connection.close();
}
catch (IOException e) {
log.warn("Error while closing connection", e);
}
}
//----------------------------------------------------< Import / Export >---
@Override
public ContentHandler getImportContentHandler(String parentAbsPath, int uuidBehavior) throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
//------------------------------------------------------------< Locking >---
/**
* @see javax.jcr.Session#addLockToken(String)
*/
@Override
public void addLockToken(String lt) {
try {
getWorkspace().getLockManager().addLockToken(lt);
} catch (RepositoryException e) {
log.warn("Unable to add lock token '{}' to this session: {}", lt, e.getMessage());
}
}
/**
* @see javax.jcr.Session#getLockTokens()
*/
@Override
public String[] getLockTokens() {
try {
return getWorkspace().getLockManager().getLockTokens();
} catch (RepositoryException e) {
log.warn("Unable to retrieve lock tokens for this session: {}", e.getMessage());
return new String[0]; }
}
/**
* @see javax.jcr.Session#removeLockToken(String)
*/
@Override
public void removeLockToken(String lt) {
try {
getWorkspace().getLockManager().addLockToken(lt);
} catch (RepositoryException e) {
log.warn("Unable to add lock token '{}' to this session: {}", lt, e.getMessage());
}
}
//------------------------------------------------------< AccessControl >---
@Override
public boolean hasPermission(String absPath, String actions) throws RepositoryException {
checkIsAlive();
// TODO
return false;
}
/**
* @see javax.jcr.Session#checkPermission(String, String)
*/
@Override
public void checkPermission(String absPath, String actions) throws AccessControlException, RepositoryException {
if (!hasPermission(absPath, actions)) {
throw new AccessControlException("Access control violation: path = " + absPath + ", actions = " + actions);
}
}
@Override
public boolean hasCapability(String methodName, Object target, Object[] arguments) throws RepositoryException {
checkIsAlive();
// TODO
return false;
}
@Override
public AccessControlManager getAccessControlManager() throws RepositoryException {
checkIsAlive();
// TODO
return null;
}
//----------------------------------------------------------< Retention >---
@Override
public RetentionManager getRetentionManager() throws RepositoryException {
throw new UnsupportedRepositoryOperationException("Retention Management is not supported.");
}
//--------------------------------------------------------------------------
/**
* Performs a sanity check on this session.
*
* @throws RepositoryException if this session has been rendered invalid
* for some reason (e.g. if this session has been closed explicitly by logout)
*/
void checkIsAlive() throws RepositoryException {
// check session status
if (!isAlive) {
throw new RepositoryException("This session has been closed.");
}
}
/**
* Returns true if the repository supports the given option. False otherwise.
*
* @param option Any of the option constants defined by {@link Repository}
* that either returns 'true' or 'false'. I.e.
* <ul>
* <li>{@link Repository#LEVEL_1_SUPPORTED}</li>
* <li>{@link Repository#LEVEL_2_SUPPORTED}</li>
* <li>{@link Repository#OPTION_ACCESS_CONTROL_SUPPORTED}</li>
* <li>{@link Repository#OPTION_ACTIVITIES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_BASELINES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_JOURNALED_OBSERVATION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_LIFECYCLE_SUPPORTED}</li>
* <li>{@link Repository#OPTION_LOCKING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_NODE_AND_PROPERTY_WITH_SAME_NAME_SUPPORTED}</li>
* <li>{@link Repository#OPTION_NODE_TYPE_MANAGEMENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_OBSERVATION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_QUERY_SQL_SUPPORTED}</li>
* <li>{@link Repository#OPTION_RETENTION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_SHAREABLE_NODES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_SIMPLE_VERSIONING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_TRANSACTIONS_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UNFILED_CONTENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UPDATE_MIXIN_NODE_TYPES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UPDATE_PRIMARY_NODE_TYPE_SUPPORTED}</li>
* <li>{@link Repository#OPTION_VERSIONING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_WORKSPACE_MANAGEMENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_XML_EXPORT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_XML_IMPORT_SUPPORTED}</li>
* <li>{@link Repository#WRITE_SUPPORTED}</li>
* </ul>
* @return true if the repository supports the given option. False otherwise.
*/
boolean isSupportedOption(String option) {
String desc = getRepository().getDescriptor(option);
// if the descriptors are not available return true. the missing
// functionality of the given SPI impl will in this case be detected
// upon the corresponding SPI call (see JCR-3143).
return (desc == null) ? true : Boolean.valueOf(desc);
}
/**
* Make sure the repository supports the option indicated by the given string.
*
* @param option Any of the option constants defined by {@link Repository}
* that either returns 'true' or 'false'. I.e.
* <ul>
* <li>{@link Repository#LEVEL_1_SUPPORTED}</li>
* <li>{@link Repository#LEVEL_2_SUPPORTED}</li>
* <li>{@link Repository#OPTION_ACCESS_CONTROL_SUPPORTED}</li>
* <li>{@link Repository#OPTION_ACTIVITIES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_BASELINES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_JOURNALED_OBSERVATION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_LIFECYCLE_SUPPORTED}</li>
* <li>{@link Repository#OPTION_LOCKING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_NODE_AND_PROPERTY_WITH_SAME_NAME_SUPPORTED}</li>
* <li>{@link Repository#OPTION_NODE_TYPE_MANAGEMENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_OBSERVATION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_QUERY_SQL_SUPPORTED}</li>
* <li>{@link Repository#OPTION_RETENTION_SUPPORTED}</li>
* <li>{@link Repository#OPTION_SHAREABLE_NODES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_SIMPLE_VERSIONING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_TRANSACTIONS_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UNFILED_CONTENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UPDATE_MIXIN_NODE_TYPES_SUPPORTED}</li>
* <li>{@link Repository#OPTION_UPDATE_PRIMARY_NODE_TYPE_SUPPORTED}</li>
* <li>{@link Repository#OPTION_VERSIONING_SUPPORTED}</li>
* <li>{@link Repository#OPTION_WORKSPACE_MANAGEMENT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_XML_EXPORT_SUPPORTED}</li>
* <li>{@link Repository#OPTION_XML_IMPORT_SUPPORTED}</li>
* <li>{@link Repository#WRITE_SUPPORTED}</li>
* </ul>
* @throws UnsupportedRepositoryOperationException If the given option is
* not supported.
* @throws RepositoryException If another error occurs.
* @see javax.jcr.Repository#getDescriptorKeys()
*/
void checkSupportedOption(String option) throws RepositoryException {
if (!isSupportedOption(option)) {
throw new UnsupportedRepositoryOperationException(option + " is not supported by this repository.");
}
}
/**
* Checks if this session has pending changes.
*
* @throws InvalidItemStateException if this nodes session has pending changes
* @throws RepositoryException
*/
void checkHasPendingChanges() throws RepositoryException {
// check for pending changes
if (hasPendingChanges()) {
String msg = "Unable to perform operation. Session has pending changes.";
log.debug(msg);
throw new InvalidItemStateException(msg);
}
}
//--------------------------------------------------------------------------
private class Context implements SessionContext<SessionImpl> {
@Override
public SessionImpl getSession() {
return SessionImpl.this;
}
@Override
public GlobalContext getGlobalContext() {
return globalContext;
}
@Override
public String getWorkspaceName() {
return connection.getWorkspaceName();
}
@Override
public Connection getConnection() {
return connection;
}
@Override
public ValueFactory getValueFactory() {
return valueFactory;
}
}
} | fix indention
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1311688 13f79535-47bb-0310-9956-ffa450edef68
| oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/SessionImpl.java | fix indention |
|
Java | apache-2.0 | daeb81cc280744ccaa7873551b8a47a3c8646029 | 0 | jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics | /*
* Java Genetic Algorithm Library (@!identifier!@).
* Copyright (c) @!year!@ Franz Wilhelmstötter
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author:
* Franz Wilhelmstötter ([email protected])
*
*/
package org.jenetics.stat;
/**
* Some statistical special functions.
*
* @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a>
* @version $Id$
*/
@SuppressWarnings("javadoc")
final class math {
private math() {
throw new AssertionError("Don't create an 'math' instance.");
}
/**
* Return the <i>error function</i> of {@code z}. The fractional error of
* this implementation is less than 1.2E-7.
*
* @param z the value to calculate the error function for.
* @return the error function for {@code z}.
*/
public static double erf(final double z) {
final double t = 1.0/(1.0 + 0.5*Math.abs(z));
// Horner's method
final double result = 1 - t*Math.exp(
-z*z - 1.26551223 +
t*( 1.00002368 +
t*( 0.37409196 +
t*( 0.09678418 +
t*(-0.18628806 +
t*( 0.27886807 +
t*(-1.13520398 +
t*( 1.48851587 +
t*(-0.82215223 +
t*(0.17087277))))))))));
return z >= 0 ? result : -result;
}
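// Illustrative note (not part of the original source): erf relates to the standard normal cdf via
// Phi(z) = (1 + erf(z / sqrt(2))) / 2; for instance erf(1.0) is approximately 0.8427, and
//   double p975 = (1.0 + erf(1.96 / Math.sqrt(2.0))) / 2.0; // roughly 0.975
// agrees with the Phi(double) implementation further below to within the documented 1.2E-7 error.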
/**
* Return φ(x), the standard Gaussian pdf.
*
* @see #φ(double)
* @param x the value to calculate φ for.
* @return the φ value for x.
*/
public static double phi(final double x) {
return Math.exp(-x*x/2.0) / Math.sqrt(2.0*Math.PI);
}
/**
* Return φ(x), the standard Gaussian pdf.
*
* @see #phi(double)
* @param x the value to calculate φ for.
* @return the φ value for x.
*/
public static double φ(final double x) {
return phi(x);
}
/**
* Return φ(x, µ, σ), the standard Gaussian pdf with mean µ and stddev σ.
*
* @see #phi(double, double, double)
* @param x the value to calculate φ for.
* @param mu the mean value.
* @param sigma the stddev.
* @return the φ value for x.
*/
public static double phi(final double x, final double mu, final double sigma) {
return phi((x - mu)/sigma)/sigma;
}
/**
* Return φ(x, µ, σ), the standard Gaussian pdf with mean µ and stddev σ.
*
* @see #phi(double, double, double)
* @param x the value to calculate φ for.
* @param µ the mean value.
* @param σ the stddev.
* @return the φ value for x.
*/
public static double φ(final double x, final double µ, final double σ) {
return phi(x, µ, σ);
}
/**
* Return Φ(z), the standard Gaussian cdf using Taylor approximation.
*
* @param z the value to calculate Φ for.
* @return the Φ for value z.
*/
public static double Phi(final double z) {
if (z < -8.0) {
return 0.0;
}
if (z > 8.0) {
return 1.0;
}
double s = 0.0;
double t = z;
for (int i = 3; s + t != s; i += 2) {
s = s + t;
t = t*z*z/i;
}
return 0.5 + s*phi(z);
}
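// Illustrative values (not part of the original source): Phi(0.0) returns exactly 0.5 because the
// Taylor series contributes nothing at z = 0, and Phi(1.96) evaluates to roughly 0.975, the usual
// 97.5th percentile of the standard normal distribution.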
/**
* Return Φ(z), the standard Gaussian cdf using Taylor approximation.
*
* @param z the value to calculate Φ for.
* @return the Φ for value z.
*/
public static double Φ(final double z) {
return Phi(z);
}
/**
* Return Φ(z, µ, σ), the standard Gaussian cdf with mean µ and stddev σ.
*
* @see #phi(double, double, double)
* @param z the value to calculate Φ for.
* @param mu the mean value.
* @param sigma the stddev.
* @return the φ value for x.
*/
public static double Phi(final double z, final double mu, final double sigma) {
return Phi((z - mu)/sigma);
}
/**
* Return Φ(z, µ, σ), the standard Gaussian cdf with mean µ and stddev σ.
*
* @see #phi(double, double, double)
* @param z the value to calculate Φ for.
* @param µ the mean value.
* @param σ the stddev.
* @return the φ value for x.
*/
public static double Φ(final double z, final double µ, final double σ) {
return Phi(z, µ, σ);
}
}
| src/main/java/org/jenetics/stat/math.java | /*
* Java Genetic Algorithm Library (@!identifier!@).
* Copyright (c) @!year!@ Franz Wilhelmstötter
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author:
* Franz Wilhelmstötter ([email protected])
*
*/
package org.jenetics.stat;
/**
* Some statistical special functions.
*
* @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a>
* @version $Id$
*/
final class math {
private math() {
throw new AssertionError("Don't create an 'math' instance.");
}
/**
* Return the <i>error function</i> of {@code z}. The fractional error of
* this implementation is less than 1.2E-7.
*
* @param z the value to calculate the error function for.
* @return the error function for {@code z}.
*/
static double erf(final double z) {
final double t = 1.0/(1.0 + 0.5*Math.abs(z));
// Horner's method
final double result = 1 - t*Math.exp(
-z*z - 1.26551223 +
t*( 1.00002368 +
t*( 0.37409196 +
t*( 0.09678418 +
t*(-0.18628806 +
t*( 0.27886807 +
t*(-1.13520398 +
t*( 1.48851587 +
t*(-0.82215223 +
t*(0.17087277))))))))));
return z >= 0 ? result : -result;
}
/**
* Return φ(x), the standard Gaussian pdf.
*
* @see #φ(double)
* @param x the value to calculate φ for.
* @return the φ value for x.
*/
static double phi(final double x) {
return Math.exp(-x*x/2.0) / Math.sqrt(2.0*Math.PI);
}
/**
* Return φ(x), the standard Gaussian pdf.
*
* @see #phi(double)
* @param x the value to calculate φ for.
* @return the φ value for x.
*/
static double φ(final double x) {
return phi(x);
}
/**
* Return φ(x, µ, σ), the standard Gaussian pdf with mean µ and stddev σ.
*
* @see #phi(double, double, double)
* @param x the value to calculate φ for.
* @param mu the mean value.
* @param sigma the stddev.
* @return the φ value for x.
*/
static double phi(final double x, final double mu, final double sigma) {
return phi((x - mu)/sigma)/sigma;
}
/**
* Return φ(x, µ, σ), the standard Gaussian pdf with mean µ and stddev σ.
*
* @see #phi(double, double, double)
* @param x the value to calculate φ for.
* @param µ the mean value.
* @param σ the stddev.
* @return the φ value for x.
*/
static double φ(final double x, final double µ, final double σ) {
return phi(x, µ, σ);
}
/**
* Return Φ(z), the standard Gaussian cdf using Taylor approximation.
*
* @param z the value to calculate Φ for.
* @return the Φ for value z.
*/
static double Phi(final double z) {
if (z < -8.0) {
return 0.0;
}
if (z > 8.0) {
return 1.0;
}
double s = 0.0;
double t = z;
for (int i = 3; s + t != s; i += 2) {
s = s + t;
t = t*z*z/i;
}
return 0.5 + s*phi(z);
}
/**
* Return Φ(z), the standard Gaussian cdf using Taylor approximation.
*
* @param z the value to calculate Φ for.
* @return the Φ for value z.
*/
static double Φ(final double z) {
return Phi(z);
}
/**
* Return Φ(z, µ, σ), the standard Gaussian cdf with mean µ and stddev σ.
*
* @see #phi(double, double, double)
* @param z the value to calculate Φ for.
* @param mu the mean value.
* @param sigma the stddev.
* @return the φ value for x.
*/
static double Phi(final double z, final double mu, final double sigma) {
return Phi((z - mu)/sigma);
}
/**
* Return Φ(z, µ, σ), the standard Gaussian cdf with mean µ and stddev σ.
*
* @see #phi(double, double, double)
* @param z the value to calculate Φ for.
* @param µ the mean value.
* @param σ the stddev.
* @return the φ value for x.
*/
static double Φ(final double z, final double µ, final double σ) {
return Phi(z, µ, σ);
}
}
| https://sourceforge.net/apps/trac/jenetics/ticket/39
Preparing version 0.8.1.0
| src/main/java/org/jenetics/stat/math.java | https://sourceforge.net/apps/trac/jenetics/ticket/39 Preparing version 0.8.1.0 |
|
Java | apache-2.0 | 25ad5edae220afeccd123a621079c465e3002350 | 0 | apache/commons-jexl,apache/commons-jexl,apache/commons-jexl | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.jexl3.internal;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.jexl3.JexlArithmetic;
import org.apache.commons.jexl3.JexlContext;
import org.apache.commons.jexl3.JexlEngine;
import org.apache.commons.jexl3.JexlException;
import org.apache.commons.jexl3.JexlException.VariableIssue;
import org.apache.commons.jexl3.JexlOperator;
import org.apache.commons.jexl3.JexlOptions;
import org.apache.commons.jexl3.introspection.JexlMethod;
import org.apache.commons.jexl3.introspection.JexlPropertyGet;
import org.apache.commons.jexl3.introspection.JexlPropertySet;
import org.apache.commons.jexl3.introspection.JexlUberspect;
import org.apache.commons.jexl3.parser.ASTArrayAccess;
import org.apache.commons.jexl3.parser.ASTAssignment;
import org.apache.commons.jexl3.parser.ASTFunctionNode;
import org.apache.commons.jexl3.parser.ASTIdentifier;
import org.apache.commons.jexl3.parser.ASTIdentifierAccess;
import org.apache.commons.jexl3.parser.ASTMethodNode;
import org.apache.commons.jexl3.parser.ASTReference;
import org.apache.commons.jexl3.parser.ASTVar;
import org.apache.commons.jexl3.parser.JexlNode;
import org.apache.commons.jexl3.parser.ParserVisitor;
import org.apache.commons.logging.Log;
/**
* The helper base of an interpreter of JEXL syntax.
* @since 3.0
*/
public abstract class InterpreterBase extends ParserVisitor {
/** The JEXL engine. */
protected final Engine jexl;
/** The logger. */
protected final Log logger;
/** The uberspect. */
protected final JexlUberspect uberspect;
/** The arithmetic handler. */
protected final JexlArithmetic arithmetic;
/** The context to store/retrieve variables. */
protected final JexlContext context;
/** The options. */
protected final JexlOptions options;
/** Cache executors. */
protected final boolean cache;
/** Cancellation support. */
protected final AtomicBoolean cancelled;
/** Empty parameters for method matching. */
protected static final Object[] EMPTY_PARAMS = new Object[0];
/** The namespace resolver. */
protected final JexlContext.NamespaceResolver ns;
/** The operators evaluation delegate. */
protected final Operators operators;
/** The map of 'prefix:function' to object resolving as namespaces. */
protected final Map<String, Object> functions;
/** The map of dynamically created namespaces, NamespaceFunctor or duck-types of those. */
protected Map<String, Object> functors;
/**
* Creates an interpreter base.
* @param engine the engine creating this interpreter
* @param opts the evaluation options
* @param aContext the evaluation context
*/
protected InterpreterBase(Engine engine, JexlOptions opts, JexlContext aContext) {
this.jexl = engine;
this.logger = jexl.logger;
this.uberspect = jexl.uberspect;
this.context = aContext != null ? aContext : Engine.EMPTY_CONTEXT;
this.cache = engine.cache != null;
JexlArithmetic jexla = jexl.arithmetic;
this.options = opts == null? engine.options(aContext) : opts;
this.arithmetic = jexla.options(options);
if (arithmetic != jexla && !arithmetic.getClass().equals(jexla.getClass())) {
logger.warn("expected arithmetic to be " + jexla.getClass().getSimpleName()
+ ", got " + arithmetic.getClass().getSimpleName()
);
}
if (this.context instanceof JexlContext.NamespaceResolver) {
ns = ((JexlContext.NamespaceResolver) context);
} else {
ns = Engine.EMPTY_NS;
}
AtomicBoolean acancel = null;
if (this.context instanceof JexlContext.CancellationHandle) {
acancel = ((JexlContext.CancellationHandle) context).getCancellation();
}
this.cancelled = acancel != null? acancel : new AtomicBoolean(false);
Map<String,Object> ons = options.getNamespaces();
this.functions = ons.isEmpty()? jexl.functions : ons;
this.functors = null;
this.operators = new Operators(this);
}
/**
* Copy constructor.
* @param ii the base to copy
* @param jexla the arithmetic instance to use (or null)
*/
protected InterpreterBase(InterpreterBase ii, JexlArithmetic jexla) {
jexl = ii.jexl;
logger = ii.logger;
uberspect = ii.uberspect;
arithmetic = jexla;
context = ii.context;
options = ii.options.copy();
cache = ii.cache;
ns = ii.ns;
operators = ii.operators;
cancelled = ii.cancelled;
functions = ii.functions;
functors = ii.functors;
}
/**
* Attempt to call close() if supported.
* <p>This is used when dealing with auto-closeable (duck-like) objects
* @param closeable the object we'd like to close
*/
protected void closeIfSupported(Object closeable) {
if (closeable != null) {
JexlMethod mclose = uberspect.getMethod(closeable, "close", EMPTY_PARAMS);
if (mclose != null) {
try {
mclose.invoke(closeable, EMPTY_PARAMS);
} catch (Exception xignore) {
logger.warn(xignore);
}
}
}
}
/**
* Resolves a namespace, eventually allocating an instance using context as constructor argument.
* <p>
* The lifetime of such instances span the current expression or script evaluation.</p>
* @param prefix the prefix name (may be null for global namespace)
* @param node the AST node
* @return the namespace instance
*/
protected Object resolveNamespace(String prefix, JexlNode node) {
Object namespace;
// check whether this namespace is a functor
synchronized (this) {
if (functors != null) {
namespace = functors.get(prefix);
if (namespace != null) {
return namespace;
}
}
}
// check if namespace is a resolver
namespace = ns.resolveNamespace(prefix);
if (namespace == null) {
namespace = functions.get(prefix);
if (prefix != null && namespace == null) {
throw new JexlException(node, "no such function namespace " + prefix, null);
}
}
// shortcut if ns is known to be not-a-functor
final boolean cacheable = cache;
Object cached = cacheable ? node.jjtGetValue() : null;
if (cached != JexlContext.NamespaceFunctor.class) {
// allow namespace to instantiate a functor with context if possible, not an error otherwise
Object functor = null;
if (namespace instanceof JexlContext.NamespaceFunctor) {
functor = ((JexlContext.NamespaceFunctor) namespace).createFunctor(context);
} else if (namespace instanceof Class<?> || namespace instanceof String) {
// attempt to reuse last ctor cached in volatile JexlNode.value
if (cached instanceof JexlMethod) {
try {
Object eval = ((JexlMethod) cached).tryInvoke(null, context);
if (JexlEngine.TRY_FAILED != eval) {
functor = eval;
}
} catch (JexlException.TryFailed xtry) {
throw new JexlException(node, "unable to instantiate namespace " + prefix, xtry.getCause());
}
}
// find a ctor with that context class
if (functor == null) {
JexlMethod ctor = uberspect.getConstructor(namespace, context);
if (ctor != null) {
try {
functor = ctor.invoke(namespace, context);
if (cacheable && ctor.isCacheable()) {
node.jjtSetValue(ctor);
}
} catch (Exception xinst) {
throw new JexlException(node, "unable to instantiate namespace " + prefix, xinst);
}
}
// try again; find a ctor with no arg
if (functor == null) {
ctor = uberspect.getConstructor(namespace);
if (ctor != null) {
try {
functor = ctor.invoke(namespace);
} catch (Exception xinst) {
throw new JexlException(node, "unable to instantiate namespace " + prefix, xinst);
}
}
// try again; use a class, namespace of static methods
if (functor == null) {
// try to find a class with that name
if (namespace instanceof String) {
try {
namespace = uberspect.getClassLoader().loadClass((String) namespace);
} catch (ClassNotFoundException xignore) {
// not a class
namespace = null;
}
} // we know its a class
}
}
}
}
// got a functor, store it and return it
if (functor != null) {
synchronized (this) {
if (functors == null) {
functors = new HashMap<>();
}
functors.put(prefix, functor);
}
return functor;
} else {
// use the NamespaceFunctor class to tag this node as not-a-functor
node.jjtSetValue(JexlContext.NamespaceFunctor.class);
}
}
return namespace;
}
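// Illustrative sketch (the engine setup below is an assumption, not taken from this file): namespaces
// usually reach this method either through JexlBuilder, e.g.
//   JexlEngine jexl = new JexlBuilder()
//       .namespaces(java.util.Collections.<String, Object>singletonMap("math", Math.class))
//       .create();
//   Object v = jexl.createExpression("math:abs(-42)").evaluate(context); // 42
// or through a JexlContext implementing NamespaceResolver; with a plain class such as Math,
// resolveNamespace("math", node) returns the class itself and no functor instance is created.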
/**
* Defines a variable.
* @param var the variable to define
* @param frame the frame in which it will be defined
* @return true if definition succeeded, false otherwise
*/
protected boolean defineVariable(ASTVar var, LexicalFrame frame) {
int symbol = var.getSymbol();
if (symbol < 0) {
return false;
}
if (var.isRedefined()) {
return false;
}
return frame.defineSymbol(symbol, var.isCaptured());
}
/**
* Checks whether a variable is defined.
* <p>The var may be either a local variable declared in the frame and
* visible from the block or defined in the context.
* @param frame the frame
* @param block the block
* @param name the variable name
* @return true if variable is defined, false otherwise
*/
protected boolean isVariableDefined(Frame frame, LexicalScope block, String name) {
if (frame != null && block != null) {
Integer ref = frame.getScope().getSymbol(name);
int symbol = ref != null? ref : -1;
if (symbol >= 0 && block.hasSymbol(symbol)) {
Object value = frame.get(symbol);
return value != Scope.UNDEFINED && value != Scope.UNDECLARED;
}
}
return context.has(name);
}
/**
* Gets a value of a defined local variable or from the context.
* @param frame the local frame
* @param block the lexical block if any
* @param identifier the variable node
* @return the value
*/
protected Object getVariable(Frame frame, LexicalScope block, ASTIdentifier identifier) {
int symbol = identifier.getSymbol();
// if we have a symbol, we have a scope thus a frame
if (options.isLexicalShade() && identifier.isShaded()) {
return undefinedVariable(identifier, identifier.getName());
}
if (symbol >= 0) {
if (frame.has(symbol)) {
Object value = frame.get(symbol);
if (value != Scope.UNDEFINED) {
return value;
}
}
}
String name = identifier.getName();
Object value = context.get(name);
if (value == null && !context.has(name)) {
boolean ignore = (isSafe()
&& (symbol >= 0
|| identifier.jjtGetParent() instanceof ASTAssignment))
|| (identifier.jjtGetParent() instanceof ASTReference);
if (!ignore) {
return unsolvableVariable(identifier, name, true); // undefined
}
}
return value;
}
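// Recap of the lookup order implemented above (comment added for clarity): a lexically shaded
// identifier fails immediately, a declared local symbol wins when its frame slot holds a defined
// value, and only then is the surrounding JexlContext consulted; e.g. in "var x = 1; x + y" the 'x'
// operand is read from the frame while 'y' falls through to context.get("y").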
/**
* Sets a variable in the global context.
* <p>If interpretation applies lexical shade, the variable must already exist (i.e.,
* the context has(...) method returns true), otherwise an error occurs.
* @param node the node
* @param name the variable name
* @param value the variable value
*/
protected void setContextVariable(JexlNode node, String name, Object value) {
if (options.isLexicalShade() && !context.has(name)) {
throw new JexlException.Variable(node, name, true);
}
try {
context.set(name, value);
} catch (UnsupportedOperationException xsupport) {
throw new JexlException(node, "context is readonly", xsupport);
}
}
/**
* Whether this interpreter is currently evaluating with a strict engine flag.
* @return true if strict engine, false otherwise
*/
protected boolean isStrictEngine() {
return options.isStrict();
}
/**
* Whether this interpreter ignores null in navigation expression as errors.
* @return true if safe, false otherwise
*/
protected boolean isSafe() {
return options.isSafe();
}
/**
* Whether this interpreter is currently evaluating with a silent mode.
* @return true if silent, false otherwise
*/
protected boolean isSilent() {
return options.isSilent();
}
/**
* @return true if interrupt throws a JexlException.Cancel.
*/
protected boolean isCancellable() {
return options.isCancellable();
}
/**
* Finds the node causing an NPE for dyadic operators.
* @param xrt the RuntimeException
* @param node the parent node
* @param left the left argument
* @param right the right argument
* @return the left, right or parent node
*/
protected JexlNode findNullOperand(RuntimeException xrt, JexlNode node, Object left, Object right) {
if (xrt instanceof JexlArithmetic.NullOperand) {
if (left == null) {
return node.jjtGetChild(0);
}
if (right == null) {
return node.jjtGetChild(1);
}
}
return node;
}
/**
* Triggered when a variable can not be resolved.
* @param node the node where the error originated from
* @param var the variable name
* @param undef whether the variable is undefined or null
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object unsolvableVariable(JexlNode node, String var, boolean undef) {
return variableError(node, var, undef? VariableIssue.UNDEFINED : VariableIssue.NULLVALUE);
}
/**
* Triggered when a variable is lexically known as undefined.
* @param node the node where the error originated from
* @param var the variable name
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object undefinedVariable(JexlNode node, String var) {
return variableError(node, var, VariableIssue.UNDEFINED);
}
/**
* Triggered when a variable is lexically known as being redefined.
* @param node the node where the error originated from
* @param var the variable name
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object redefinedVariable(JexlNode node, String var) {
return variableError(node, var, VariableIssue.REDEFINED);
}
/**
* Triggered when a variable generates an issue.
* @param node the node where the error originated from
* @param var the variable name
* @param issue the issue type
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object variableError(JexlNode node, String var, VariableIssue issue) {
if (isStrictEngine() && !node.isTernaryProtected()) {
throw new JexlException.Variable(node, var, issue);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.variableError(node, var, issue));
}
return null;
}
/**
* Triggered when a method can not be resolved.
* @param node the node where the error originated from
* @param method the method name
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object unsolvableMethod(JexlNode node, String method) {
return unsolvableMethod(node, method, null);
}
/**
* Triggered when a method can not be resolved.
* @param node the node where the error originated from
* @param method the method name
* @param args the method arguments
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object unsolvableMethod(JexlNode node, String method, Object[] args) {
if (isStrictEngine()) {
throw new JexlException.Method(node, method, args);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.methodError(node, method, args));
}
return null;
}
/**
* Triggered when a property can not be resolved.
* @param node the node where the error originated from
* @param property the property node
* @param cause the cause if any
* @param undef whether the property is undefined or null
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object unsolvableProperty(JexlNode node, String property, boolean undef, Throwable cause) {
if (isStrictEngine() && !node.isTernaryProtected()) {
throw new JexlException.Property(node, property, undef, cause);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.propertyError(node, property, undef));
}
return null;
}
/**
* Checks whether a reference child node holds a local variable reference.
* @param node the reference node
* @param which the child we are checking
* @return true if child is local variable, false otherwise
*/
protected boolean isLocalVariable(ASTReference node, int which) {
return (node.jjtGetNumChildren() > which
&& node.jjtGetChild(which) instanceof ASTIdentifier
&& ((ASTIdentifier) node.jjtGetChild(which)).getSymbol() >= 0);
}
/**
* Checks whether a reference child node holds a function call.
* @param node the reference node
* @return true if child is function call, false otherwise
*/
protected boolean isFunctionCall(ASTReference node) {
return (node.jjtGetNumChildren() > 0
&& node.jjtGetChild(0) instanceof ASTFunctionNode);
}
/**
* Pretty-prints a failing property (de)reference.
* <p>Used by calls to unsolvableProperty(...).</p>
* @param node the property node
* @return the (pretty) string
*/
protected String stringifyProperty(JexlNode node) {
if (node instanceof ASTArrayAccess) {
return "["
+ stringifyPropertyValue(node.jjtGetChild(0))
+ "]";
}
if (node instanceof ASTMethodNode) {
return stringifyPropertyValue(node.jjtGetChild(0));
}
if (node instanceof ASTFunctionNode) {
return stringifyPropertyValue(node.jjtGetChild(0));
}
if (node instanceof ASTIdentifier) {
return ((ASTIdentifier) node).getName();
}
if (node instanceof ASTReference) {
return stringifyProperty(node.jjtGetChild(0));
}
return stringifyPropertyValue(node);
}
/**
* Pretty-prints a failing property value (de)reference.
* <p>Used by calls to unsolvableProperty(...).</p>
* @param node the property node
* @return the (pretty) string value
*/
protected static String stringifyPropertyValue(JexlNode node) {
return node != null? new Debugger().depth(1).data(node) : "???";
}
/**
* Triggered when an operator fails.
* @param node the node where the error originated from
* @param operator the method name
* @param cause the cause of error (if any)
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object operatorError(JexlNode node, JexlOperator operator, Throwable cause) {
if (isStrictEngine()) {
throw new JexlException.Operator(node, operator.getOperatorSymbol(), cause);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.operatorError(node, operator.getOperatorSymbol()), cause);
}
return null;
}
/**
* Triggered when an annotation processing fails.
* @param node the node where the error originated from
* @param annotation the annotation name
* @param cause the cause of error (if any)
* @return throws a JexlException if strict and not silent, null otherwise
*/
protected Object annotationError(JexlNode node, String annotation, Throwable cause) {
if (isStrictEngine()) {
throw new JexlException.Annotation(node, annotation, cause);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.annotationError(node, annotation), cause);
}
return null;
}
/**
* Triggered when method, function or constructor invocation fails with an exception.
* @param node the node triggering the exception
* @param methodName the method/function name
* @param xany the cause
* @return a JexlException that will be thrown
*/
protected JexlException invocationException(JexlNode node, String methodName, Throwable xany) {
Throwable cause = xany.getCause();
if (cause instanceof JexlException) {
return (JexlException) cause;
}
if (cause instanceof InterruptedException) {
return new JexlException.Cancel(node);
}
return new JexlException(node, methodName, xany);
}
/**
* Cancels this evaluation, setting the cancel flag that will result in a JexlException.Cancel to be thrown.
* @return false if already cancelled, true otherwise
*/
protected boolean cancel() {
return cancelled.compareAndSet(false, true);
}
/**
* Checks whether this interpreter execution was cancelled due to thread interruption.
* @return true if cancelled, false otherwise
*/
protected boolean isCancelled() {
return cancelled.get() | Thread.currentThread().isInterrupted();
}
/**
* Throws a JexlException.Cancel if script execution was cancelled.
* @param node the node being evaluated
*/
protected void cancelCheck(JexlNode node) {
if (isCancelled()) {
throw new JexlException.Cancel(node);
}
}
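// Illustrative note: cancellation can be driven from outside the interpreter. A context that also
// implements JexlContext.CancellationHandle shares its AtomicBoolean with this instance (see the
// constructor above), so flipping that flag to true makes the next cancelCheck(...) call throw
// JexlException.Cancel.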
/**
* Concatenate arguments in call(...).
* <p>When target == context, we are dealing with a global namespace function call
* @param target the pseudo-method owner, first to-be argument
* @param narrow whether we should attempt to narrow number arguments
* @param args the other (non null) arguments
* @return the arguments array
*/
protected Object[] functionArguments(Object target, boolean narrow, Object[] args) {
// when target == context, we are dealing with the null namespace
if (target == null || target == context) {
if (narrow) {
arithmetic.narrowArguments(args);
}
return args;
}
// makes target 1st args, copy others - optionally narrow numbers
Object[] nargv = new Object[args.length + 1];
if (narrow) {
nargv[0] = functionArgument(true, target);
for (int a = 1; a <= args.length; ++a) {
nargv[a] = functionArgument(true, args[a - 1]);
}
} else {
nargv[0] = target;
System.arraycopy(args, 0, nargv, 1, args.length);
}
return nargv;
}
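// Small worked example (illustrative): with a non-null target distinct from the context,
// functionArguments(target, false, new Object[]{a, b}) yields {target, a, b}, so the resolved
// method receives the namespace functor as its first parameter; when narrow is true each Number
// argument is additionally reduced to the smallest fitting type via arithmetic.narrow(...).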
/**
* Concatenate arguments in call(...).
* @param target the pseudo-method owner, first to-be argument
* @param narrow whether we should attempt to narrow number arguments
* @param args the other (non null) arguments
* @return the arguments array
*/
protected Object[] callArguments(Object target, boolean narrow, Object[] args) {
// makes target 1st args, copy others - optionally narrow numbers
Object[] nargv = new Object[args.length + 1];
if (narrow) {
nargv[0] = functionArgument(true, target);
for (int a = 1; a <= args.length; ++a) {
nargv[a] = functionArgument(true, args[a - 1]);
}
} else {
nargv[0] = target;
System.arraycopy(args, 0, nargv, 1, args.length);
}
return nargv;
}
/**
* Optionally narrows an argument for a function call.
* @param narrow whether narrowing should occur
* @param arg the argument
* @return the narrowed argument
*/
protected Object functionArgument(boolean narrow, Object arg) {
return narrow && arg instanceof Number ? arithmetic.narrow((Number) arg) : arg;
}
/**
* Cached function call.
*/
protected static class Funcall implements JexlNode.Funcall {
/** Whether narrow should be applied to arguments. */
protected final boolean narrow;
/** The JexlMethod to delegate the call to. */
protected final JexlMethod me;
/**
* Constructor.
* @param jme the method
* @param flag the narrow flag
*/
protected Funcall(JexlMethod jme, boolean flag) {
this.me = jme;
this.narrow = flag;
}
/**
* Try invocation.
* @param ii the interpreter
* @param name the method name
* @param target the method target
* @param args the method arguments
* @return the method invocation result (or JexlEngine.TRY_FAILED)
*/
protected Object tryInvoke(InterpreterBase ii, String name, Object target, Object[] args) {
return me.tryInvoke(name, target, ii.functionArguments(null, narrow, args));
}
}
/**
* Cached arithmetic function call.
*/
protected static class ArithmeticFuncall extends Funcall {
/**
* Constructor.
* @param jme the method
* @param flag the narrow flag
*/
protected ArithmeticFuncall(JexlMethod jme, boolean flag) {
super(jme, flag);
}
@Override
protected Object tryInvoke(InterpreterBase ii, String name, Object target, Object[] args) {
return me.tryInvoke(name, ii.arithmetic, ii.functionArguments(target, narrow, args));
}
}
/**
* Cached context function call.
*/
protected static class ContextFuncall extends Funcall {
/**
* Constructor.
* @param jme the method
* @param flag the narrow flag
*/
protected ContextFuncall(JexlMethod jme, boolean flag) {
super(jme, flag);
}
@Override
protected Object tryInvoke(InterpreterBase ii, String name, Object target, Object[] args) {
return me.tryInvoke(name, ii.context, ii.functionArguments(target, narrow, args));
}
}
/**
* A ctor that needs a context as 1st argument.
*/
protected static class ContextualCtor extends Funcall {
/**
* Constructor.
* @param jme the method
* @param flag the narrow flag
*/
protected ContextualCtor(JexlMethod jme, boolean flag) {
super(jme, flag);
}
@Override
protected Object tryInvoke(InterpreterBase ii, String name, Object target, Object[] args) {
return me.tryInvoke(name, target, ii.callArguments(ii.context, narrow, args));
}
}
/**
* Helping dispatch function calls.
*/
protected class CallDispatcher {
/**
* The syntactic node.
*/
final JexlNode node;
/**
* Whether solution is cacheable.
*/
boolean cacheable = true;
/**
* Whether arguments have been narrowed.
*/
boolean narrow = false;
/**
* The method to call.
*/
JexlMethod vm = null;
/**
* The method invocation target.
*/
Object target = null;
/**
* The actual arguments.
*/
Object[] argv = null;
/**
* The cacheable funcall if any.
*/
Funcall funcall = null;
/**
* Dispatcher ctor.
*
* @param anode the syntactic node.
* @param acacheable whether resolution can be cached
*/
CallDispatcher(JexlNode anode, boolean acacheable) {
this.node = anode;
this.cacheable = acacheable;
}
/**
* Whether the method is a target method.
*
* @param ntarget the target instance
* @param mname the method name
* @param arguments the method arguments
* @return true if a target method was found, false otherwise
*/
protected boolean isTargetMethod(Object ntarget, String mname, final Object[] arguments) {
// try a method
vm = uberspect.getMethod(ntarget, mname, arguments);
if (vm != null) {
argv = arguments;
target = ntarget;
if (cacheable && vm.isCacheable()) {
funcall = new Funcall(vm, narrow);
}
return true;
}
return false;
}
/**
* Whether the method is a context method.
*
* @param mname the method name
* @param arguments the method arguments
* @return true if a context method was found, false otherwise
*/
protected boolean isContextMethod(String mname, final Object[] arguments) {
vm = uberspect.getMethod(context, mname, arguments);
if (vm != null) {
argv = arguments;
target = context;
if (cacheable && vm.isCacheable()) {
funcall = new ContextFuncall(vm, narrow);
}
return true;
}
return false;
}
/**
* Whether the method is an arithmetic method.
*
* @param mname the method name
* @param arguments the method arguments
* @return true if arithmetic, false otherwise
*/
protected boolean isArithmeticMethod(String mname, final Object[] arguments) {
vm = uberspect.getMethod(arithmetic, mname, arguments);
if (vm != null) {
argv = arguments;
target = arithmetic;
if (cacheable && vm.isCacheable()) {
funcall = new ArithmeticFuncall(vm, narrow);
}
return true;
}
return false;
}
/**
* Attempt to reuse last funcall cached in volatile JexlNode.value (if
* it was cacheable).
*
* @param ntarget the target instance
* @param mname the method name
* @param arguments the method arguments
* @return TRY_FAILED if invocation was not possible or failed, the
* result otherwise
*/
protected Object tryEval(final Object ntarget, final String mname, final Object[] arguments) {
// do we have a method/function name ?
// attempt to reuse last funcall cached in volatile JexlNode.value (if it was not a variable)
if (mname != null && cacheable && ntarget != null) {
Object cached = node.jjtGetValue();
if (cached instanceof Funcall) {
return ((Funcall) cached).tryInvoke(InterpreterBase.this, mname, ntarget, arguments);
}
}
return JexlEngine.TRY_FAILED;
}
/**
* Evaluates the method previously dispatched.
*
* @param mname the method name
* @return the method invocation result
* @throws Exception when invocation fails
*/
protected Object eval(String mname) throws Exception {
// we have either evaluated and returned or might have found a method
if (vm != null) {
// vm cannot be null if xjexl is null
Object eval = vm.invoke(target, argv);
// cache executor in volatile JexlNode.value
if (funcall != null) {
node.jjtSetValue(funcall);
}
return eval;
}
return unsolvableMethod(node, mname, argv);
}
}
/**
* Gets an attribute of an object.
*
* @param object to retrieve value from
* @param attribute the attribute of the object, e.g. an index (1, 0, 2) or key for a map
* @param node the node that evaluated as the object
* @return the attribute value
*/
protected Object getAttribute(Object object, Object attribute, JexlNode node) {
if (object == null) {
throw new JexlException(node, "object is null");
}
cancelCheck(node);
final JexlOperator operator = node != null && node.jjtGetParent() instanceof ASTArrayAccess
? JexlOperator.ARRAY_GET : JexlOperator.PROPERTY_GET;
Object result = operators.tryOverload(node, operator, object, attribute);
if (result != JexlEngine.TRY_FAILED) {
return result;
}
Exception xcause = null;
try {
// attempt to reuse last executor cached in volatile JexlNode.value
if (node != null && cache) {
Object cached = node.jjtGetValue();
if (cached instanceof JexlPropertyGet) {
JexlPropertyGet vg = (JexlPropertyGet) cached;
Object value = vg.tryInvoke(object, attribute);
if (!vg.tryFailed(value)) {
return value;
}
}
}
// resolve that property
List<JexlUberspect.PropertyResolver> resolvers = uberspect.getResolvers(operator, object);
JexlPropertyGet vg = uberspect.getPropertyGet(resolvers, object, attribute);
if (vg != null) {
Object value = vg.invoke(object);
// cache executor in volatile JexlNode.value
if (node != null && cache && vg.isCacheable()) {
node.jjtSetValue(vg);
}
return value;
}
} catch (Exception xany) {
xcause = xany;
}
// lets fail
if (node != null) {
boolean safe = (node instanceof ASTIdentifierAccess) && ((ASTIdentifierAccess) node).isSafe();
if (safe) {
return null;
} else {
String attrStr = attribute != null ? attribute.toString() : null;
return unsolvableProperty(node, attrStr, true, xcause);
}
} else {
// direct call
String error = "unable to get object property"
+ ", class: " + object.getClass().getName()
+ ", property: " + attribute;
throw new UnsupportedOperationException(error, xcause);
}
}
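// Usage sketch (illustrative; the node variable names below are hypothetical): this is the common
// entry point for both property and index reads, e.g.
//   Object byName = getAttribute(bean, "name", identifierNode); // PROPERTY_GET resolver chain
//   Object byIndex = getAttribute(list, 0, arrayAccessNode); // ARRAY_GET when the parent is ASTArrayAccess
// and a failed resolution on an access marked safe (ASTIdentifierAccess.isSafe()) yields null
// instead of raising an error.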
/**
* Sets an attribute of an object.
*
* @param object to set the value to
* @param attribute the attribute of the object, e.g. an index (1, 0, 2) or key for a map
* @param value the value to assign to the object's attribute
* @param node the node that evaluated as the object
*/
protected void setAttribute(Object object, Object attribute, Object value, JexlNode node) {
cancelCheck(node);
final JexlOperator operator = node != null && node.jjtGetParent() instanceof ASTArrayAccess
? JexlOperator.ARRAY_SET : JexlOperator.PROPERTY_SET;
Object result = operators.tryOverload(node, operator, object, attribute, value);
if (result != JexlEngine.TRY_FAILED) {
return;
}
Exception xcause = null;
try {
// attempt to reuse last executor cached in volatile JexlNode.value
if (node != null && cache) {
Object cached = node.jjtGetValue();
if (cached instanceof JexlPropertySet) {
JexlPropertySet setter = (JexlPropertySet) cached;
Object eval = setter.tryInvoke(object, attribute, value);
if (!setter.tryFailed(eval)) {
return;
}
}
}
List<JexlUberspect.PropertyResolver> resolvers = uberspect.getResolvers(operator, object);
JexlPropertySet vs = uberspect.getPropertySet(resolvers, object, attribute, value);
// if we can't find an exact match, narrow the value argument and try again
if (vs == null) {
// replace all numbers with the smallest type that will fit
Object[] narrow = {value};
if (arithmetic.narrowArguments(narrow)) {
vs = uberspect.getPropertySet(resolvers, object, attribute, narrow[0]);
}
}
if (vs != null) {
// cache executor in volatile JexlNode.value
vs.invoke(object, value);
if (node != null && cache && vs.isCacheable()) {
node.jjtSetValue(vs);
}
return;
}
} catch (Exception xany) {
xcause = xany;
}
        // let's fail
if (node != null) {
String attrStr = attribute != null ? attribute.toString() : null;
unsolvableProperty(node, attrStr, true, xcause);
} else {
// direct call
String error = "unable to set object property"
+ ", class: " + object.getClass().getName()
+ ", property: " + attribute
+ ", argument: " + value.getClass().getSimpleName();
throw new UnsupportedOperationException(error, xcause);
}
}
}
| src/main/java/org/apache/commons/jexl3/internal/InterpreterBase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.jexl3.internal;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.jexl3.JexlArithmetic;
import org.apache.commons.jexl3.JexlContext;
import org.apache.commons.jexl3.JexlEngine;
import org.apache.commons.jexl3.JexlException;
import org.apache.commons.jexl3.JexlException.VariableIssue;
import org.apache.commons.jexl3.JexlOperator;
import org.apache.commons.jexl3.JexlOptions;
import org.apache.commons.jexl3.introspection.JexlMethod;
import org.apache.commons.jexl3.introspection.JexlPropertyGet;
import org.apache.commons.jexl3.introspection.JexlPropertySet;
import org.apache.commons.jexl3.introspection.JexlUberspect;
import org.apache.commons.jexl3.parser.ASTArrayAccess;
import org.apache.commons.jexl3.parser.ASTAssignment;
import org.apache.commons.jexl3.parser.ASTFunctionNode;
import org.apache.commons.jexl3.parser.ASTIdentifier;
import org.apache.commons.jexl3.parser.ASTIdentifierAccess;
import org.apache.commons.jexl3.parser.ASTMethodNode;
import org.apache.commons.jexl3.parser.ASTReference;
import org.apache.commons.jexl3.parser.ASTVar;
import org.apache.commons.jexl3.parser.JexlNode;
import org.apache.commons.jexl3.parser.ParserVisitor;
import org.apache.commons.logging.Log;
/**
* The helper base of an interpreter of JEXL syntax.
* @since 3.0
*/
public abstract class InterpreterBase extends ParserVisitor {
/** The JEXL engine. */
protected final Engine jexl;
/** The logger. */
protected final Log logger;
/** The uberspect. */
protected final JexlUberspect uberspect;
/** The arithmetic handler. */
protected final JexlArithmetic arithmetic;
/** The context to store/retrieve variables. */
protected final JexlContext context;
/** The options. */
protected final JexlOptions options;
/** Cache executors. */
protected final boolean cache;
/** Cancellation support. */
protected final AtomicBoolean cancelled;
/** Empty parameters for method matching. */
protected static final Object[] EMPTY_PARAMS = new Object[0];
/** The namespace resolver. */
protected final JexlContext.NamespaceResolver ns;
/** The operators evaluation delegate. */
protected final Operators operators;
/** The map of 'prefix:function' to object resolving as namespaces. */
protected final Map<String, Object> functions;
    /** The map of dynamically created namespaces, NamespaceFunctor or duck-types of those. */
protected Map<String, Object> functors;
/**
* Creates an interpreter base.
* @param engine the engine creating this interpreter
* @param opts the evaluation options
* @param aContext the evaluation context
*/
protected InterpreterBase(Engine engine, JexlOptions opts, JexlContext aContext) {
this.jexl = engine;
this.logger = jexl.logger;
this.uberspect = jexl.uberspect;
this.context = aContext != null ? aContext : Engine.EMPTY_CONTEXT;
this.cache = engine.cache != null;
JexlArithmetic jexla = jexl.arithmetic;
this.options = opts == null? engine.options(aContext) : opts;
this.arithmetic = jexla.options(options);
if (arithmetic != jexla && !arithmetic.getClass().equals(jexla.getClass())) {
logger.warn("expected arithmetic to be " + jexla.getClass().getSimpleName()
+ ", got " + arithmetic.getClass().getSimpleName()
);
}
if (this.context instanceof JexlContext.NamespaceResolver) {
ns = ((JexlContext.NamespaceResolver) context);
} else {
ns = Engine.EMPTY_NS;
}
AtomicBoolean acancel = null;
if (this.context instanceof JexlContext.CancellationHandle) {
acancel = ((JexlContext.CancellationHandle) context).getCancellation();
}
this.cancelled = acancel != null? acancel : new AtomicBoolean(false);
Map<String,Object> ons = options.getNamespaces();
this.functions = ons.isEmpty()? jexl.functions : ons;
this.functors = null;
this.operators = new Operators(this);
}
/**
* Copy constructor.
* @param ii the base to copy
* @param jexla the arithmetic instance to use (or null)
*/
protected InterpreterBase(InterpreterBase ii, JexlArithmetic jexla) {
jexl = ii.jexl;
logger = ii.logger;
uberspect = ii.uberspect;
arithmetic = jexla;
context = ii.context;
options = ii.options.copy();
cache = ii.cache;
ns = ii.ns;
operators = ii.operators;
cancelled = ii.cancelled;
functions = ii.functions;
functors = ii.functors;
}
/**
* Attempt to call close() if supported.
* <p>This is used when dealing with auto-closeable (duck-like) objects
* @param closeable the object we'd like to close
*/
protected void closeIfSupported(Object closeable) {
if (closeable != null) {
JexlMethod mclose = uberspect.getMethod(closeable, "close", EMPTY_PARAMS);
if (mclose != null) {
try {
mclose.invoke(closeable, EMPTY_PARAMS);
} catch (Exception xignore) {
logger.warn(xignore);
}
}
}
}
/**
     * Resolves a namespace, possibly allocating an instance using context as constructor argument.
* <p>
* The lifetime of such instances span the current expression or script evaluation.</p>
* @param prefix the prefix name (may be null for global namespace)
* @param node the AST node
* @return the namespace instance
*/
protected Object resolveNamespace(String prefix, JexlNode node) {
Object namespace;
// check whether this namespace is a functor
synchronized (this) {
if (functors != null) {
namespace = functors.get(prefix);
if (namespace != null) {
return namespace;
}
}
}
// check if namespace is a resolver
namespace = ns.resolveNamespace(prefix);
if (namespace == null) {
namespace = functions.get(prefix);
if (prefix != null && namespace == null) {
throw new JexlException(node, "no such function namespace " + prefix, null);
}
}
// shortcut if ns is known to be not-a-functor
final boolean cacheable = cache;
Object cached = cacheable ? node.jjtGetValue() : null;
if (cached != JexlContext.NamespaceFunctor.class) {
// allow namespace to instantiate a functor with context if possible, not an error otherwise
Object functor = null;
if (namespace instanceof JexlContext.NamespaceFunctor) {
functor = ((JexlContext.NamespaceFunctor) namespace).createFunctor(context);
} else if (namespace instanceof Class<?> || namespace instanceof String) {
// attempt to reuse last ctor cached in volatile JexlNode.value
if (cached instanceof JexlMethod) {
try {
Object eval = ((JexlMethod) cached).tryInvoke(null, context);
if (JexlEngine.TRY_FAILED != eval) {
functor = eval;
}
} catch (JexlException.TryFailed xtry) {
throw new JexlException(node, "unable to instantiate namespace " + prefix, xtry.getCause());
}
}
if (functor == null) {
// find a ctor with that context class
JexlMethod ctor = uberspect.getConstructor(namespace, context);
if (ctor != null) {
try {
functor = ctor.invoke(namespace, context);
if (cacheable && ctor.isCacheable()) {
node.jjtSetValue(ctor);
}
} catch (Exception xinst) {
throw new JexlException(node, "unable to instantiate namespace " + prefix, xinst);
}
}
// find a ctor with no arg
if (functor == null) {
ctor = uberspect.getConstructor(namespace);
if (ctor != null) {
try {
functor = ctor.invoke(namespace);
} catch (Exception xinst) {
throw new JexlException(node, "unable to instantiate namespace " + prefix, xinst);
}
}
// use a class, namespace of static methods
if (functor == null) {
// try to find a class with that name
if (namespace instanceof String) {
try {
functor = uberspect.getClassLoader().loadClass((String) namespace);
} catch (ClassNotFoundException xignore) {
// not a class
namespace = null;
}
                } else { // we know it's a class
functor = (Class<?>) namespace;
}
}
}
}
}
// got a functor, store it and return it
if (functor != null) {
synchronized (this) {
if (functors == null) {
functors = new HashMap<>();
}
functors.put(prefix, functor);
}
return functor;
} else {
// use the NamespaceFunctor class to tag this node as not-a-functor
node.jjtSetValue(JexlContext.NamespaceFunctor.class);
}
}
return namespace;
}
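    /*
     * Illustrative example (names hypothetical): an engine configured with a namespace
     * map such as {"math" -> Math.class} lets a script call math:abs(-3); the "math"
     * prefix resolves here and, when the class exposes no usable constructor taking the
     * context (or no accessible constructor at all), the class itself ends up serving
     * as a namespace of static methods.
     */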
/**
* Defines a variable.
* @param var the variable to define
* @param frame the frame in which it will be defined
* @return true if definition succeeded, false otherwise
*/
protected boolean defineVariable(ASTVar var, LexicalFrame frame) {
int symbol = var.getSymbol();
if (symbol < 0) {
return false;
}
if (var.isRedefined()) {
return false;
}
return frame.defineSymbol(symbol, var.isCaptured());
}
/**
* Checks whether a variable is defined.
* <p>The var may be either a local variable declared in the frame and
* visible from the block or defined in the context.
* @param frame the frame
* @param block the block
* @param name the variable name
* @return true if variable is defined, false otherwise
*/
protected boolean isVariableDefined(Frame frame, LexicalScope block, String name) {
if (frame != null && block != null) {
Integer ref = frame.getScope().getSymbol(name);
int symbol = ref != null? ref : -1;
if (symbol >= 0 && block.hasSymbol(symbol)) {
Object value = frame.get(symbol);
return value != Scope.UNDEFINED && value != Scope.UNDECLARED;
}
}
return context.has(name);
}
/**
* Gets a value of a defined local variable or from the context.
* @param frame the local frame
* @param block the lexical block if any
* @param identifier the variable node
* @return the value
*/
protected Object getVariable(Frame frame, LexicalScope block, ASTIdentifier identifier) {
int symbol = identifier.getSymbol();
// if we have a symbol, we have a scope thus a frame
if (options.isLexicalShade() && identifier.isShaded()) {
return undefinedVariable(identifier, identifier.getName());
}
if (symbol >= 0) {
if (frame.has(symbol)) {
Object value = frame.get(symbol);
if (value != Scope.UNDEFINED) {
return value;
}
}
}
String name = identifier.getName();
Object value = context.get(name);
if (value == null && !context.has(name)) {
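            // tolerate the unresolved name (yield null) when safe navigation applies to a symbol or
            // assignment target, or when this identifier is part of a larger reference; otherwise report it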
boolean ignore = (isSafe()
&& (symbol >= 0
|| identifier.jjtGetParent() instanceof ASTAssignment))
|| (identifier.jjtGetParent() instanceof ASTReference);
if (!ignore) {
return unsolvableVariable(identifier, name, true); // undefined
}
}
return value;
}
/**
* Sets a variable in the global context.
     * <p>If interpretation applies lexical shade, the variable must exist (i.e.
     * the context has(...) method returns true); otherwise an error occurs.
* @param node the node
* @param name the variable name
* @param value the variable value
*/
protected void setContextVariable(JexlNode node, String name, Object value) {
if (options.isLexicalShade() && !context.has(name)) {
throw new JexlException.Variable(node, name, true);
}
try {
context.set(name, value);
} catch (UnsupportedOperationException xsupport) {
throw new JexlException(node, "context is readonly", xsupport);
}
}
/**
* Whether this interpreter is currently evaluating with a strict engine flag.
* @return true if strict engine, false otherwise
*/
protected boolean isStrictEngine() {
return options.isStrict();
}
/**
* Whether this interpreter ignores null in navigation expression as errors.
* @return true if safe, false otherwise
*/
protected boolean isSafe() {
return options.isSafe();
}
/**
* Whether this interpreter is currently evaluating with a silent mode.
* @return true if silent, false otherwise
*/
protected boolean isSilent() {
return options.isSilent();
}
/**
* @return true if interrupt throws a JexlException.Cancel.
*/
protected boolean isCancellable() {
return options.isCancellable();
}
/**
     * Finds the node causing an NPE for dyadic operators.
* @param xrt the RuntimeException
* @param node the parent node
* @param left the left argument
* @param right the right argument
* @return the left, right or parent node
*/
protected JexlNode findNullOperand(RuntimeException xrt, JexlNode node, Object left, Object right) {
if (xrt instanceof JexlArithmetic.NullOperand) {
if (left == null) {
return node.jjtGetChild(0);
}
if (right == null) {
return node.jjtGetChild(1);
}
}
return node;
}
/**
* Triggered when a variable can not be resolved.
* @param node the node where the error originated from
* @param var the variable name
* @param undef whether the variable is undefined or null
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object unsolvableVariable(JexlNode node, String var, boolean undef) {
return variableError(node, var, undef? VariableIssue.UNDEFINED : VariableIssue.NULLVALUE);
}
/**
* Triggered when a variable is lexically known as undefined.
* @param node the node where the error originated from
* @param var the variable name
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object undefinedVariable(JexlNode node, String var) {
return variableError(node, var, VariableIssue.UNDEFINED);
}
/**
* Triggered when a variable is lexically known as being redefined.
* @param node the node where the error originated from
* @param var the variable name
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object redefinedVariable(JexlNode node, String var) {
return variableError(node, var, VariableIssue.REDEFINED);
}
/**
* Triggered when a variable generates an issue.
* @param node the node where the error originated from
* @param var the variable name
* @param issue the issue type
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object variableError(JexlNode node, String var, VariableIssue issue) {
if (isStrictEngine() && !node.isTernaryProtected()) {
throw new JexlException.Variable(node, var, issue);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.variableError(node, var, issue));
}
return null;
}
/**
* Triggered when a method can not be resolved.
* @param node the node where the error originated from
* @param method the method name
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object unsolvableMethod(JexlNode node, String method) {
return unsolvableMethod(node, method, null);
}
/**
* Triggered when a method can not be resolved.
* @param node the node where the error originated from
* @param method the method name
* @param args the method arguments
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object unsolvableMethod(JexlNode node, String method, Object[] args) {
if (isStrictEngine()) {
throw new JexlException.Method(node, method, args);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.methodError(node, method, args));
}
return null;
}
/**
* Triggered when a property can not be resolved.
* @param node the node where the error originated from
* @param property the property node
* @param cause the cause if any
* @param undef whether the property is undefined or null
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object unsolvableProperty(JexlNode node, String property, boolean undef, Throwable cause) {
if (isStrictEngine() && !node.isTernaryProtected()) {
throw new JexlException.Property(node, property, undef, cause);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.propertyError(node, property, undef));
}
return null;
}
/**
* Checks whether a reference child node holds a local variable reference.
* @param node the reference node
* @param which the child we are checking
* @return true if child is local variable, false otherwise
*/
protected boolean isLocalVariable(ASTReference node, int which) {
return (node.jjtGetNumChildren() > which
&& node.jjtGetChild(which) instanceof ASTIdentifier
&& ((ASTIdentifier) node.jjtGetChild(which)).getSymbol() >= 0);
}
/**
* Checks whether a reference child node holds a function call.
* @param node the reference node
* @return true if child is function call, false otherwise
*/
protected boolean isFunctionCall(ASTReference node) {
return (node.jjtGetNumChildren() > 0
&& node.jjtGetChild(0) instanceof ASTFunctionNode);
}
/**
* Pretty-prints a failing property (de)reference.
* <p>Used by calls to unsolvableProperty(...).</p>
* @param node the property node
* @return the (pretty) string
*/
protected String stringifyProperty(JexlNode node) {
if (node instanceof ASTArrayAccess) {
return "["
+ stringifyPropertyValue(node.jjtGetChild(0))
+ "]";
}
if (node instanceof ASTMethodNode) {
return stringifyPropertyValue(node.jjtGetChild(0));
}
if (node instanceof ASTFunctionNode) {
return stringifyPropertyValue(node.jjtGetChild(0));
}
if (node instanceof ASTIdentifier) {
return ((ASTIdentifier) node).getName();
}
if (node instanceof ASTReference) {
return stringifyProperty(node.jjtGetChild(0));
}
return stringifyPropertyValue(node);
}
/**
* Pretty-prints a failing property value (de)reference.
* <p>Used by calls to unsolvableProperty(...).</p>
* @param node the property node
* @return the (pretty) string value
*/
protected static String stringifyPropertyValue(JexlNode node) {
return node != null? new Debugger().depth(1).data(node) : "???";
}
/**
* Triggered when an operator fails.
* @param node the node where the error originated from
* @param operator the method name
* @param cause the cause of error (if any)
* @return throws JexlException if strict and not silent, null otherwise
*/
protected Object operatorError(JexlNode node, JexlOperator operator, Throwable cause) {
if (isStrictEngine()) {
throw new JexlException.Operator(node, operator.getOperatorSymbol(), cause);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.operatorError(node, operator.getOperatorSymbol()), cause);
}
return null;
}
/**
* Triggered when an annotation processing fails.
* @param node the node where the error originated from
* @param annotation the annotation name
* @param cause the cause of error (if any)
* @return throws a JexlException if strict and not silent, null otherwise
*/
protected Object annotationError(JexlNode node, String annotation, Throwable cause) {
if (isStrictEngine()) {
throw new JexlException.Annotation(node, annotation, cause);
} else if (logger.isDebugEnabled()) {
logger.debug(JexlException.annotationError(node, annotation), cause);
}
return null;
}
/**
* Triggered when method, function or constructor invocation fails with an exception.
* @param node the node triggering the exception
* @param methodName the method/function name
* @param xany the cause
* @return a JexlException that will be thrown
*/
protected JexlException invocationException(JexlNode node, String methodName, Throwable xany) {
Throwable cause = xany.getCause();
if (cause instanceof JexlException) {
return (JexlException) cause;
}
if (cause instanceof InterruptedException) {
return new JexlException.Cancel(node);
}
return new JexlException(node, methodName, xany);
}
/**
* Cancels this evaluation, setting the cancel flag that will result in a JexlException.Cancel to be thrown.
* @return false if already cancelled, true otherwise
*/
protected boolean cancel() {
return cancelled.compareAndSet(false, true);
}
/**
* Checks whether this interpreter execution was cancelled due to thread interruption.
* @return true if cancelled, false otherwise
*/
protected boolean isCancelled() {
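        // note the non-short-circuiting '|': both the cancel flag and the thread interrupt status are read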
return cancelled.get() | Thread.currentThread().isInterrupted();
}
/**
* Throws a JexlException.Cancel if script execution was cancelled.
* @param node the node being evaluated
*/
protected void cancelCheck(JexlNode node) {
if (isCancelled()) {
throw new JexlException.Cancel(node);
}
}
/**
* Concatenate arguments in call(...).
* <p>When target == context, we are dealing with a global namespace function call
* @param target the pseudo-method owner, first to-be argument
* @param narrow whether we should attempt to narrow number arguments
* @param args the other (non null) arguments
* @return the arguments array
*/
protected Object[] functionArguments(Object target, boolean narrow, Object[] args) {
// when target == context, we are dealing with the null namespace
if (target == null || target == context) {
if (narrow) {
arithmetic.narrowArguments(args);
}
return args;
}
// makes target 1st args, copy others - optionally narrow numbers
Object[] nargv = new Object[args.length + 1];
if (narrow) {
nargv[0] = functionArgument(true, target);
for (int a = 1; a <= args.length; ++a) {
nargv[a] = functionArgument(true, args[a - 1]);
}
} else {
nargv[0] = target;
System.arraycopy(args, 0, nargv, 1, args.length);
}
return nargv;
}
/**
* Concatenate arguments in call(...).
* @param target the pseudo-method owner, first to-be argument
* @param narrow whether we should attempt to narrow number arguments
* @param args the other (non null) arguments
* @return the arguments array
*/
protected Object[] callArguments(Object target, boolean narrow, Object[] args) {
// makes target 1st args, copy others - optionally narrow numbers
Object[] nargv = new Object[args.length + 1];
if (narrow) {
nargv[0] = functionArgument(true, target);
for (int a = 1; a <= args.length; ++a) {
nargv[a] = functionArgument(true, args[a - 1]);
}
} else {
nargv[0] = target;
System.arraycopy(args, 0, nargv, 1, args.length);
}
return nargv;
}
/**
* Optionally narrows an argument for a function call.
* @param narrow whether narrowing should occur
* @param arg the argument
* @return the narrowed argument
*/
protected Object functionArgument(boolean narrow, Object arg) {
return narrow && arg instanceof Number ? arithmetic.narrow((Number) arg) : arg;
}
/**
* Cached function call.
*/
protected static class Funcall implements JexlNode.Funcall {
/** Whether narrow should be applied to arguments. */
protected final boolean narrow;
/** The JexlMethod to delegate the call to. */
protected final JexlMethod me;
/**
* Constructor.
* @param jme the method
* @param flag the narrow flag
*/
protected Funcall(JexlMethod jme, boolean flag) {
this.me = jme;
this.narrow = flag;
}
/**
* Try invocation.
* @param ii the interpreter
* @param name the method name
* @param target the method target
* @param args the method arguments
* @return the method invocation result (or JexlEngine.TRY_FAILED)
*/
protected Object tryInvoke(InterpreterBase ii, String name, Object target, Object[] args) {
return me.tryInvoke(name, target, ii.functionArguments(null, narrow, args));
}
}
/**
* Cached arithmetic function call.
*/
protected static class ArithmeticFuncall extends Funcall {
/**
* Constructor.
* @param jme the method
* @param flag the narrow flag
*/
protected ArithmeticFuncall(JexlMethod jme, boolean flag) {
super(jme, flag);
}
@Override
protected Object tryInvoke(InterpreterBase ii, String name, Object target, Object[] args) {
return me.tryInvoke(name, ii.arithmetic, ii.functionArguments(target, narrow, args));
}
}
/**
* Cached context function call.
*/
protected static class ContextFuncall extends Funcall {
/**
* Constructor.
* @param jme the method
* @param flag the narrow flag
*/
protected ContextFuncall(JexlMethod jme, boolean flag) {
super(jme, flag);
}
@Override
protected Object tryInvoke(InterpreterBase ii, String name, Object target, Object[] args) {
return me.tryInvoke(name, ii.context, ii.functionArguments(target, narrow, args));
}
}
/**
* A ctor that needs a context as 1st argument.
*/
protected static class ContextualCtor extends Funcall {
/**
* Constructor.
* @param jme the method
* @param flag the narrow flag
*/
protected ContextualCtor(JexlMethod jme, boolean flag) {
super(jme, flag);
}
@Override
protected Object tryInvoke(InterpreterBase ii, String name, Object target, Object[] args) {
return me.tryInvoke(name, target, ii.callArguments(ii.context, narrow, args));
}
}
/**
* Helping dispatch function calls.
*/
protected class CallDispatcher {
/**
* The syntactic node.
*/
final JexlNode node;
/**
* Whether solution is cacheable.
*/
boolean cacheable = true;
/**
* Whether arguments have been narrowed.
*/
boolean narrow = false;
/**
* The method to call.
*/
JexlMethod vm = null;
/**
* The method invocation target.
*/
Object target = null;
/**
* The actual arguments.
*/
Object[] argv = null;
/**
* The cacheable funcall if any.
*/
Funcall funcall = null;
/**
* Dispatcher ctor.
*
* @param anode the syntactic node.
* @param acacheable whether resolution can be cached
*/
CallDispatcher(JexlNode anode, boolean acacheable) {
this.node = anode;
this.cacheable = acacheable;
}
/**
* Whether the method is a target method.
*
* @param ntarget the target instance
* @param mname the method name
* @param arguments the method arguments
         * @return true if a matching method was found on the target, false otherwise
*/
protected boolean isTargetMethod(Object ntarget, String mname, final Object[] arguments) {
// try a method
vm = uberspect.getMethod(ntarget, mname, arguments);
if (vm != null) {
argv = arguments;
target = ntarget;
if (cacheable && vm.isCacheable()) {
funcall = new Funcall(vm, narrow);
}
return true;
}
return false;
}
/**
* Whether the method is a context method.
*
* @param mname the method name
* @param arguments the method arguments
         * @return true if a matching method was found on the context, false otherwise
*/
protected boolean isContextMethod(String mname, final Object[] arguments) {
vm = uberspect.getMethod(context, mname, arguments);
if (vm != null) {
argv = arguments;
target = context;
if (cacheable && vm.isCacheable()) {
funcall = new ContextFuncall(vm, narrow);
}
return true;
}
return false;
}
/**
* Whether the method is an arithmetic method.
*
* @param mname the method name
* @param arguments the method arguments
* @return true if arithmetic, false otherwise
*/
protected boolean isArithmeticMethod(String mname, final Object[] arguments) {
vm = uberspect.getMethod(arithmetic, mname, arguments);
if (vm != null) {
argv = arguments;
target = arithmetic;
if (cacheable && vm.isCacheable()) {
funcall = new ArithmeticFuncall(vm, narrow);
}
return true;
}
return false;
}
/**
* Attempt to reuse last funcall cached in volatile JexlNode.value (if
* it was cacheable).
*
* @param ntarget the target instance
* @param mname the method name
* @param arguments the method arguments
* @return TRY_FAILED if invocation was not possible or failed, the
* result otherwise
*/
protected Object tryEval(final Object ntarget, final String mname, final Object[] arguments) {
// do we have a method/function name ?
// attempt to reuse last funcall cached in volatile JexlNode.value (if it was not a variable)
if (mname != null && cacheable && ntarget != null) {
Object cached = node.jjtGetValue();
if (cached instanceof Funcall) {
return ((Funcall) cached).tryInvoke(InterpreterBase.this, mname, ntarget, arguments);
}
}
return JexlEngine.TRY_FAILED;
}
/**
* Evaluates the method previously dispatched.
*
* @param mname the method name
* @return the method invocation result
* @throws Exception when invocation fails
*/
protected Object eval(String mname) throws Exception {
// we have either evaluated and returned or might have found a method
if (vm != null) {
// vm cannot be null if xjexl is null
Object eval = vm.invoke(target, argv);
// cache executor in volatile JexlNode.value
if (funcall != null) {
node.jjtSetValue(funcall);
}
return eval;
}
return unsolvableMethod(node, mname, argv);
}
}
/**
* Gets an attribute of an object.
*
* @param object to retrieve value from
* @param attribute the attribute of the object, e.g. an index (1, 0, 2) or key for a map
* @param node the node that evaluated as the object
* @return the attribute value
*/
protected Object getAttribute(Object object, Object attribute, JexlNode node) {
if (object == null) {
throw new JexlException(node, "object is null");
}
cancelCheck(node);
final JexlOperator operator = node != null && node.jjtGetParent() instanceof ASTArrayAccess
? JexlOperator.ARRAY_GET : JexlOperator.PROPERTY_GET;
Object result = operators.tryOverload(node, operator, object, attribute);
if (result != JexlEngine.TRY_FAILED) {
return result;
}
Exception xcause = null;
try {
// attempt to reuse last executor cached in volatile JexlNode.value
if (node != null && cache) {
Object cached = node.jjtGetValue();
if (cached instanceof JexlPropertyGet) {
JexlPropertyGet vg = (JexlPropertyGet) cached;
Object value = vg.tryInvoke(object, attribute);
if (!vg.tryFailed(value)) {
return value;
}
}
}
// resolve that property
List<JexlUberspect.PropertyResolver> resolvers = uberspect.getResolvers(operator, object);
JexlPropertyGet vg = uberspect.getPropertyGet(resolvers, object, attribute);
if (vg != null) {
Object value = vg.invoke(object);
// cache executor in volatile JexlNode.value
if (node != null && cache && vg.isCacheable()) {
node.jjtSetValue(vg);
}
return value;
}
} catch (Exception xany) {
xcause = xany;
}
        // let's fail
if (node != null) {
boolean safe = (node instanceof ASTIdentifierAccess) && ((ASTIdentifierAccess) node).isSafe();
if (safe) {
return null;
} else {
String attrStr = attribute != null ? attribute.toString() : null;
return unsolvableProperty(node, attrStr, true, xcause);
}
} else {
// direct call
String error = "unable to get object property"
+ ", class: " + object.getClass().getName()
+ ", property: " + attribute;
throw new UnsupportedOperationException(error, xcause);
}
}
/**
* Sets an attribute of an object.
*
* @param object to set the value to
* @param attribute the attribute of the object, e.g. an index (1, 0, 2) or key for a map
* @param value the value to assign to the object's attribute
* @param node the node that evaluated as the object
*/
protected void setAttribute(Object object, Object attribute, Object value, JexlNode node) {
cancelCheck(node);
final JexlOperator operator = node != null && node.jjtGetParent() instanceof ASTArrayAccess
? JexlOperator.ARRAY_SET : JexlOperator.PROPERTY_SET;
Object result = operators.tryOverload(node, operator, object, attribute, value);
if (result != JexlEngine.TRY_FAILED) {
return;
}
Exception xcause = null;
try {
// attempt to reuse last executor cached in volatile JexlNode.value
if (node != null && cache) {
Object cached = node.jjtGetValue();
if (cached instanceof JexlPropertySet) {
JexlPropertySet setter = (JexlPropertySet) cached;
Object eval = setter.tryInvoke(object, attribute, value);
if (!setter.tryFailed(eval)) {
return;
}
}
}
List<JexlUberspect.PropertyResolver> resolvers = uberspect.getResolvers(operator, object);
JexlPropertySet vs = uberspect.getPropertySet(resolvers, object, attribute, value);
// if we can't find an exact match, narrow the value argument and try again
if (vs == null) {
// replace all numbers with the smallest type that will fit
Object[] narrow = {value};
if (arithmetic.narrowArguments(narrow)) {
vs = uberspect.getPropertySet(resolvers, object, attribute, narrow[0]);
}
}
if (vs != null) {
// cache executor in volatile JexlNode.value
vs.invoke(object, value);
if (node != null && cache && vs.isCacheable()) {
node.jjtSetValue(vs);
}
return;
}
} catch (Exception xany) {
xcause = xany;
}
        // let's fail
if (node != null) {
String attrStr = attribute != null ? attribute.toString() : null;
unsolvableProperty(node, attrStr, true, xcause);
} else {
// direct call
String error = "unable to set object property"
+ ", class: " + object.getClass().getName()
+ ", property: " + attribute
+ ", argument: " + value.getClass().getSimpleName();
throw new UnsupportedOperationException(error, xcause);
}
}
}
| JEXL-333: fixed namespace resolution on static only (private ctor) classes (take 2)
Task #JEXL-333 - Allow declaration of namespace within script | src/main/java/org/apache/commons/jexl3/internal/InterpreterBase.java | JEXL-333: fixed namespace resolution on static only (private ctor) classes (take 2) Task #JEXL-333 - Allow declaration of namespace within script |
|
Java | apache-2.0 | cac2fd5f7c7e7c1fbde2cec317bb76817699804e | 0 | clhedrick/sakai,Fudan-University/sakai,joserabal/sakai,frasese/sakai,ktakacs/sakai,liubo404/sakai,lorenamgUMU/sakai,OpenCollabZA/sakai,ktakacs/sakai,kingmook/sakai,bzhouduke123/sakai,buckett/sakai-gitflow,bkirschn/sakai,OpenCollabZA/sakai,ouit0408/sakai,liubo404/sakai,conder/sakai,OpenCollabZA/sakai,puramshetty/sakai,udayg/sakai,udayg/sakai,bkirschn/sakai,clhedrick/sakai,rodriguezdevera/sakai,colczr/sakai,wfuedu/sakai,joserabal/sakai,colczr/sakai,tl-its-umich-edu/sakai,lorenamgUMU/sakai,pushyamig/sakai,zqian/sakai,wfuedu/sakai,ouit0408/sakai,rodriguezdevera/sakai,kingmook/sakai,rodriguezdevera/sakai,liubo404/sakai,ktakacs/sakai,wfuedu/sakai,zqian/sakai,hackbuteer59/sakai,introp-software/sakai,udayg/sakai,kwedoff1/sakai,frasese/sakai,hackbuteer59/sakai,noondaysun/sakai,Fudan-University/sakai,noondaysun/sakai,frasese/sakai,joserabal/sakai,Fudan-University/sakai,pushyamig/sakai,lorenamgUMU/sakai,kingmook/sakai,zqian/sakai,zqian/sakai,rodriguezdevera/sakai,frasese/sakai,whumph/sakai,bzhouduke123/sakai,clhedrick/sakai,clhedrick/sakai,colczr/sakai,pushyamig/sakai,tl-its-umich-edu/sakai,colczr/sakai,bkirschn/sakai,rodriguezdevera/sakai,hackbuteer59/sakai,liubo404/sakai,duke-compsci290-spring2016/sakai,bkirschn/sakai,frasese/sakai,bkirschn/sakai,Fudan-University/sakai,zqian/sakai,ouit0408/sakai,puramshetty/sakai,lorenamgUMU/sakai,OpenCollabZA/sakai,rodriguezdevera/sakai,surya-janani/sakai,puramshetty/sakai,colczr/sakai,rodriguezdevera/sakai,bzhouduke123/sakai,noondaysun/sakai,pushyamig/sakai,whumph/sakai,conder/sakai,noondaysun/sakai,willkara/sakai,wfuedu/sakai,kwedoff1/sakai,noondaysun/sakai,tl-its-umich-edu/sakai,liubo404/sakai,ktakacs/sakai,Fudan-University/sakai,ktakacs/sakai,puramshetty/sakai,conder/sakai,whumph/sakai,willkara/sakai,surya-janani/sakai,bkirschn/sakai,Fudan-University/sakai,liubo404/sakai,buckett/sakai-gitflow,willkara/sakai,lorenamgUMU/sakai,bkirschn/sakai,ktakacs/sakai,colczr/sakai,introp-software/sakai,hackbuteer59/sakai,OpenCollabZA/sakai,ouit0408/sakai,kwedoff1/sakai,bzhouduke123/sakai,kingmook/sakai,tl-its-umich-edu/sakai,tl-its-umich-edu/sakai,kingmook/sakai,duke-compsci290-spring2016/sakai,buckett/sakai-gitflow,introp-software/sakai,buckett/sakai-gitflow,clhedrick/sakai,tl-its-umich-edu/sakai,ouit0408/sakai,kwedoff1/sakai,whumph/sakai,ouit0408/sakai,duke-compsci290-spring2016/sakai,tl-its-umich-edu/sakai,joserabal/sakai,lorenamgUMU/sakai,willkara/sakai,surya-janani/sakai,rodriguezdevera/sakai,whumph/sakai,whumph/sakai,ouit0408/sakai,willkara/sakai,Fudan-University/sakai,frasese/sakai,kingmook/sakai,willkara/sakai,hackbuteer59/sakai,bzhouduke123/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,kingmook/sakai,joserabal/sakai,ouit0408/sakai,willkara/sakai,hackbuteer59/sakai,surya-janani/sakai,kingmook/sakai,hackbuteer59/sakai,surya-janani/sakai,joserabal/sakai,udayg/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,conder/sakai,ktakacs/sakai,Fudan-University/sakai,kwedoff1/sakai,puramshetty/sakai,udayg/sakai,frasese/sakai,whumph/sakai,puramshetty/sakai,tl-its-umich-edu/sakai,kwedoff1/sakai,zqian/sakai,noondaysun/sakai,joserabal/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,clhedrick/sakai,buckett/sakai-gitflow,bzhouduke123/sakai,clhedrick/sakai,udayg/sakai,buckett/sakai-gitflow,frasese/sakai,lorenamgUMU/sakai,conder/sakai,colczr/sakai,kwedoff1/sakai,noondaysun/sakai,wfuedu/sakai,introp-software/sakai,lorenamgUMU/sakai,hackbuteer59/sakai,joserabal/sakai,udayg/sakai,bkirschn/sakai,d
uke-compsci290-spring2016/sakai,puramshetty/sakai,conder/sakai,buckett/sakai-gitflow,OpenCollabZA/sakai,introp-software/sakai,wfuedu/sakai,surya-janani/sakai,puramshetty/sakai,surya-janani/sakai,colczr/sakai,introp-software/sakai,clhedrick/sakai,pushyamig/sakai,OpenCollabZA/sakai,buckett/sakai-gitflow,conder/sakai,wfuedu/sakai,noondaysun/sakai,zqian/sakai,conder/sakai,pushyamig/sakai,liubo404/sakai,OpenCollabZA/sakai,wfuedu/sakai,surya-janani/sakai,kwedoff1/sakai,bzhouduke123/sakai,whumph/sakai,zqian/sakai,bzhouduke123/sakai,liubo404/sakai,willkara/sakai,pushyamig/sakai,ktakacs/sakai,pushyamig/sakai,duke-compsci290-spring2016/sakai | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/msgcntr/trunk/messageforums-app/src/java/org/sakaiproject/tool/messageforums/MessageForumsFilePickerServlet.java $
* $Id: MessageForumsFilePickerServlet.java 9227 2006-05-15 15:02:42Z [email protected] $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.messageforums;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Enumeration;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.tool.api.ActiveTool;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.cover.ActiveToolManager;
import org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.entitybroker.access.HttpServletAccessProvider;
import org.sakaiproject.entitybroker.access.HttpServletAccessProviderManager;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.jsf.util.JsfTool;
import org.sakaiproject.util.Web;
import org.sakaiproject.tool.api.ToolException;
/**
* @author Chen Wen
* @version $Id$
*
*/
public class MessageForumsFilePickerServlet extends JsfTool implements HttpServletAccessProvider {
private static final String HELPER_EXT = ".helper";
private static final String HELPER_SESSION_PREFIX = "session.";
private boolean initComplete = false;
private SiteService siteService;
private HttpServletAccessProviderManager accessProviderManager;
private DiscussionForumManager forumManager;
protected void dispatch(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
// NOTE: this is a simple path dispatching, taking the path as the view
// id = jsp file name for the view,
// with default used if no path and a path prefix as configured.
// TODO: need to allow other sorts of dispatching, such as pulling out
// drill-down ids and making them
// available to the JSF
// build up the target that will be dispatched to
String target = req.getPathInfo();
// see if we have a helper request
if (sendToHelper(req, res, target)) {
return;
}
// see if we have a resource request - i.e. a path with an extension,
// and one that is not the JSF_EXT
if (isResourceRequest(target)) {
// get a dispatcher to the path
RequestDispatcher resourceDispatcher = getServletContext().getRequestDispatcher(target);
if (resourceDispatcher != null) {
resourceDispatcher.forward(req, res);
return;
}
}
if ("Title".equals(req.getParameter("panel"))) {
// This allows only one Title JSF for each tool
target = "/title.jsf";
}
else {
ToolSession session = SessionManager.getCurrentToolSession();
if (target == null || "/".equals(target)) {
target = computeDefaultTarget(true);
// make sure it's a valid path
if (!target.startsWith("/")) {
target = "/" + target;
}
// now that we've messed with the URL, send a redirect to make
// it official
res.sendRedirect(Web.returnUrl(req, target));
return;
}
// see if we want to change the specifically requested view
String newTarget = redirectRequestedTarget(target);
// make sure it's a valid path
if (!newTarget.startsWith("/")) {
newTarget = "/" + newTarget;
}
if (!newTarget.equals(target)) {
// now that we've messed with the URL, send a redirect to make
// it official
res.sendRedirect(Web.returnUrl(req, newTarget));
return;
}
target = newTarget;
// store this
if (m_defaultToLastView) {
session.setAttribute(LAST_VIEW_VISITED, target);
}
}
// add the configured folder root and extension (if missing)
target = m_path + target;
// add the default JSF extension (if we have no extension)
int lastSlash = target.lastIndexOf("/");
int lastDot = target.lastIndexOf(".");
if (lastDot < 0 || lastDot < lastSlash) {
target += JSF_EXT;
}
// set the information that can be removed from return URLs
req.setAttribute(URL_PATH, m_path);
req.setAttribute(URL_EXT, ".jsp");
// set the sakai request object wrappers to provide the native, not
// Sakai set up, URL information
// - this assures that the FacesServlet can dispatch to the proper view
// based on the path info
req.setAttribute(Tool.NATIVE_URL, Tool.NATIVE_URL);
// TODO: Should setting the HTTP headers be moved up to the portal level
// as well?
res.setContentType("text/html; charset=UTF-8");
res.addDateHeader("Expires", System.currentTimeMillis() - (1000L * 60L * 60L * 24L * 365L));
res.addDateHeader("Last-Modified", System.currentTimeMillis());
res.addHeader("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0, post-check=0, pre-check=0");
res.addHeader("Pragma", "no-cache");
// dispatch to the target
RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(target);
dispatcher.forward(req, res);
// restore the request object
req.removeAttribute(Tool.NATIVE_URL);
req.removeAttribute(URL_PATH);
req.removeAttribute(URL_EXT);
}
protected boolean sendToHelper(HttpServletRequest req, HttpServletResponse res, String target)
throws ToolException
{
String path = req.getPathInfo();
if (path == null)
path = "/";
        // if the synoptic helper is obscuring the add-attachment helper, remove it
final int helperIndex = path.indexOf("sakai.filepicker");
// filepicker helper should be first part of path
if (helperIndex > 1 ) {
path = path.substring(helperIndex-1);
target = path;
}
//
// 0 parts means the path was just "/", otherwise parts[0] = "",
// parts[1] = item id, parts[2] if present is "edit"...
String[] parts = path.split("/");
if (parts.length < 2) {
return false;
}
/* if (parts.length < 3) {
// return false;
}*/
if (!parts[1].endsWith(HELPER_EXT)) {
return false;
}
/*
if (!parts[2].endsWith(HELPER_EXT)) {
return false;
}
*/
ToolSession toolSession = SessionManager.getCurrentToolSession();
Enumeration params = req.getParameterNames();
while (params.hasMoreElements()) {
String paramName = (String) params.nextElement();
if (paramName.startsWith(HELPER_SESSION_PREFIX)) {
String attributeName = paramName.substring(HELPER_SESSION_PREFIX.length());
toolSession.setAttribute(attributeName, req.getParameter(paramName));
}
}
// calc helper id
int posEnd = parts[1].lastIndexOf(".");
////int posEnd = parts[2].lastIndexOf(".");
String helperId = target.substring(1, posEnd + 1);
////String helperId = parts[2].substring(0, posEnd);
ActiveTool helperTool = ActiveToolManager.getActiveTool(helperId);
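        // only seed a default return (done) URL when none was provided and this is not the bare filepicker helper request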
if (toolSession.getAttribute(helperTool.getId() + Tool.HELPER_DONE_URL) == null
&& !target.equals("/sakai.filepicker.helper")) {
toolSession.setAttribute(helperTool.getId() + Tool.HELPER_DONE_URL, req.getContextPath() + req.getServletPath() + computeDefaultTarget(true));
}
String context = req.getContextPath() + req.getServletPath() + Web.makePath(parts, 1, 2);
////String context = req.getContextPath() + req.getServletPath() + Web.makePath(parts, 2, 3);
String toolPath = Web.makePath(parts, 2, parts.length);
////String toolPath = Web.makePath(parts, 3, parts.length);
helperTool.help(req, res, context, toolPath);
return true; // was handled as helper call
}
protected String computeDefaultTarget(boolean lastVisited) {
// setup for the default view as configured
String target = "/" + m_default;
// if we are doing lastVisit and there's a last-visited view, for this
// tool placement / user, use that
if (lastVisited) {
ToolSession session = SessionManager.getCurrentToolSession();
String last = (String) session.getAttribute(LAST_VIEW_VISITED);
if (last != null) {
target = last;
}
}
return target;
}
/**
* Initialize the servlet.
*
* @param config
* The servlet config.
* @throws ServletException
*/
public void init(ServletConfig config) throws ServletException {
super.init(config);
try {
            //load service level dependencies from the ComponentManager
siteService = (SiteService) ComponentManager.get("org.sakaiproject.site.api.SiteService");
accessProviderManager = (HttpServletAccessProviderManager) ComponentManager
.get("org.sakaiproject.entitybroker.access.HttpServletAccessProviderManager");
forumManager = (DiscussionForumManager) ComponentManager
.get("org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager");
//register forum Entity prefixes for direct servlet request handling
if (accessProviderManager != null) {
accessProviderManager.registerProvider("forum_topic", this);
accessProviderManager.registerProvider("forum", this);
accessProviderManager.registerProvider("forum_message", this);
}
            //mark initialization of dependencies as complete
if (siteService != null && forumManager != null)
initComplete = true;
} catch (Exception e) {
e.printStackTrace();
}
}
public void handleAccess(HttpServletRequest req, HttpServletResponse res, EntityReference ref) {
//don't bother if the user is not logged in
if (req.getRemoteUser() == null) {
try {
String url = req.getRequestURL().toString();
String context = req.getContextPath();
String prefix = url.substring(0,url.lastIndexOf(context));
res.sendRedirect(prefix + "/authn/login?url="
+ URLEncoder.encode(req.getRequestURL().toString(), "UTF-8"));
return;
}
catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
catch (IOException e) {
e.printStackTrace();
}
}
String[] parts = req.getPathInfo().split("/");
        //ensure we have our dependencies and something resembling proper input
if (initComplete && parts.length > 2) {
String context = "";
if ("forum_topic".equals(parts[1]))
context = forumManager.getContextForTopicById(Long.valueOf(parts[2]));
else if ("forum".equals(parts[1]))
context = forumManager.getContextForForumById(Long.valueOf(parts[2]));
else if ("forum_message".equals(parts[1]))
context = forumManager.getContextForMessageById(Long.valueOf(parts[2]));
String placementId = "";
String target = "";
//Calculate the placement for the Entity... if you know of a better way, please
//replace this!
try {
if (siteService.getSite(context).getToolForCommonId("sakai.forums") != null) {
placementId = siteService.getSite(context)
.getToolForCommonId("sakai.forums").getId();
}
else {
placementId = siteService.getSite(context)
.getToolForCommonId("sakai.messagecenter").getId();
}
}
catch (IdUnusedException iue) {
iue.printStackTrace();
}
            //TODO: I've tried (and failed) a number of things here to get this to work
            //without a redirect; setting all the placement and session things into the
            //threadlocal manager came closest: it rendered the page, but links were
            //broken. Might be doable with more time or fresh eyes though.
//direct the request to the proper 'direct' view with needed parameters
if ("forum_topic".equals(parts[1])) {
req.setAttribute("topicId", parts[2]);
target = "/jsp/discussionForum/message/dfAllMessagesDirect.jsf?topicId="
+ parts[2] + "&placementId=" + placementId;
}
else if ("forum".equals(parts[1])) {
target = "/jsp/discussionForum/forum/dfForumDirect.jsf?forumId="
+ parts[2] + "&placementId=" + placementId;
}
else if ("forum_message".equals(parts[1])) {
target = "/jsp/discussionForum/message/dfViewMessageDirect.jsf?messageId="
+ parts[2] + "&placementId=" + placementId + "&topicId="
+ forumManager.getMessageById(Long.valueOf(parts[2])).getTopic().getId()
+ "&forumId=" + forumManager.ForumIdForMessage(Long.valueOf(parts[2]));
}
// dispatch to the target
RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(target);
try {
dispatcher.forward(req, res);
}
catch (ServletException e) {
e.printStackTrace();
}
catch (IOException e) {
e.printStackTrace();
}
}
}
}
| msgcntr/messageforums-app/src/java/org/sakaiproject/tool/messageforums/MessageForumsFilePickerServlet.java | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/msgcntr/trunk/messageforums-app/src/java/org/sakaiproject/tool/messageforums/MessageForumsFilePickerServlet.java $
* $Id: MessageForumsFilePickerServlet.java 9227 2006-05-15 15:02:42Z [email protected] $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.messageforums;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Enumeration;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.tool.api.ActiveTool;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.cover.ActiveToolManager;
import org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.entitybroker.access.HttpServletAccessProvider;
import org.sakaiproject.entitybroker.access.HttpServletAccessProviderManager;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.jsf.util.JsfTool;
import org.sakaiproject.util.Web;
import org.sakaiproject.tool.api.ToolException;
/**
* @author Chen Wen
* @version $Id$
*
*/
public class MessageForumsFilePickerServlet extends JsfTool implements HttpServletAccessProvider {
private static final String HELPER_EXT = ".helper";
private static final String HELPER_SESSION_PREFIX = "session.";
private boolean initComplete = false;
private SiteService siteService;
private HttpServletAccessProviderManager accessProviderManager;
private DiscussionForumManager forumManager;
protected void dispatch(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
// NOTE: this is a simple path dispatching, taking the path as the view
// id = jsp file name for the view,
// with default used if no path and a path prefix as configured.
// TODO: need to allow other sorts of dispatching, such as pulling out
// drill-down ids and making them
// available to the JSF
// build up the target that will be dispatched to
String target = req.getPathInfo();
// see if we have a helper request
if (sendToHelper(req, res, target)) {
return;
}
// see if we have a resource request - i.e. a path with an extension,
// and one that is not the JSF_EXT
if (isResourceRequest(target)) {
// get a dispatcher to the path
RequestDispatcher resourceDispatcher = getServletContext().getRequestDispatcher(target);
if (resourceDispatcher != null) {
resourceDispatcher.forward(req, res);
return;
}
}
if ("Title".equals(req.getParameter("panel"))) {
// This allows only one Title JSF for each tool
target = "/title.jsf";
}
else {
ToolSession session = SessionManager.getCurrentToolSession();
if (target == null || "/".equals(target)) {
target = computeDefaultTarget(true);
// make sure it's a valid path
if (!target.startsWith("/")) {
target = "/" + target;
}
// now that we've messed with the URL, send a redirect to make
// it official
res.sendRedirect(Web.returnUrl(req, target));
return;
}
// see if we want to change the specifically requested view
String newTarget = redirectRequestedTarget(target);
// make sure it's a valid path
if (!newTarget.startsWith("/")) {
newTarget = "/" + newTarget;
}
if (!newTarget.equals(target)) {
// now that we've messed with the URL, send a redirect to make
// it official
res.sendRedirect(Web.returnUrl(req, newTarget));
return;
}
target = newTarget;
// store this
if (m_defaultToLastView) {
session.setAttribute(LAST_VIEW_VISITED, target);
}
}
// add the configured folder root and extension (if missing)
target = m_path + target;
// add the default JSF extension (if we have no extension)
int lastSlash = target.lastIndexOf("/");
int lastDot = target.lastIndexOf(".");
if (lastDot < 0 || lastDot < lastSlash) {
target += JSF_EXT;
}
// set the information that can be removed from return URLs
req.setAttribute(URL_PATH, m_path);
req.setAttribute(URL_EXT, ".jsp");
// set the sakai request object wrappers to provide the native, not
// Sakai set up, URL information
// - this assures that the FacesServlet can dispatch to the proper view
// based on the path info
req.setAttribute(Tool.NATIVE_URL, Tool.NATIVE_URL);
// TODO: Should setting the HTTP headers be moved up to the portal level
// as well?
res.setContentType("text/html; charset=UTF-8");
res.addDateHeader("Expires", System.currentTimeMillis() - (1000L * 60L * 60L * 24L * 365L));
res.addDateHeader("Last-Modified", System.currentTimeMillis());
res.addHeader("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0, post-check=0, pre-check=0");
res.addHeader("Pragma", "no-cache");
// dispatch to the target
RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(target);
dispatcher.forward(req, res);
// restore the request object
req.removeAttribute(Tool.NATIVE_URL);
req.removeAttribute(URL_PATH);
req.removeAttribute(URL_EXT);
}
protected boolean sendToHelper(HttpServletRequest req, HttpServletResponse res, String target)
throws ToolException
{
String path = req.getPathInfo();
if (path == null)
path = "/";
        // if the synoptic helper is obscuring the add-attachment helper, remove it
final int helperIndex = path.indexOf("sakai.filepicker");
// filepicker helper should be first part of path
if (helperIndex > 1 ) {
path = path.substring(helperIndex-1);
target = path;
}
//
// 0 parts means the path was just "/", otherwise parts[0] = "",
// parts[1] = item id, parts[2] if present is "edit"...
String[] parts = path.split("/");
if (parts.length < 2) {
return false;
}
/* if (parts.length < 3) {
// return false;
}*/
if (!parts[1].endsWith(HELPER_EXT)) {
return false;
}
/*
if (!parts[2].endsWith(HELPER_EXT)) {
return false;
}
*/
ToolSession toolSession = SessionManager.getCurrentToolSession();
Enumeration params = req.getParameterNames();
while (params.hasMoreElements()) {
String paramName = (String) params.nextElement();
if (paramName.startsWith(HELPER_SESSION_PREFIX)) {
String attributeName = paramName.substring(HELPER_SESSION_PREFIX.length());
toolSession.setAttribute(attributeName, req.getParameter(paramName));
}
}
// calc helper id
int posEnd = parts[1].lastIndexOf(".");
////int posEnd = parts[2].lastIndexOf(".");
String helperId = target.substring(1, posEnd + 1);
////String helperId = parts[2].substring(0, posEnd);
ActiveTool helperTool = ActiveToolManager.getActiveTool(helperId);
if (toolSession.getAttribute(helperTool.getId() + Tool.HELPER_DONE_URL) == null) {
toolSession.setAttribute(helperTool.getId() + Tool.HELPER_DONE_URL, req.getContextPath() + req.getServletPath() + computeDefaultTarget(true));
}
String context = req.getContextPath() + req.getServletPath() + Web.makePath(parts, 1, 2);
////String context = req.getContextPath() + req.getServletPath() + Web.makePath(parts, 2, 3);
String toolPath = Web.makePath(parts, 2, parts.length);
////String toolPath = Web.makePath(parts, 3, parts.length);
helperTool.help(req, res, context, toolPath);
return true; // was handled as helper call
}
protected String computeDefaultTarget(boolean lastVisited) {
// setup for the default view as configured
String target = "/" + m_default;
// if we are doing lastVisit and there's a last-visited view, for this
// tool placement / user, use that
if (lastVisited) {
ToolSession session = SessionManager.getCurrentToolSession();
String last = (String) session.getAttribute(LAST_VIEW_VISITED);
if (last != null) {
target = last;
}
}
return target;
}
/**
* Initialize the servlet.
*
* @param config
* The servlet config.
* @throws ServletException
*/
public void init(ServletConfig config) throws ServletException {
super.init(config);
try {
            //load service level dependencies from the ComponentManager
siteService = (SiteService) ComponentManager.get("org.sakaiproject.site.api.SiteService");
accessProviderManager = (HttpServletAccessProviderManager) ComponentManager
.get("org.sakaiproject.entitybroker.access.HttpServletAccessProviderManager");
forumManager = (DiscussionForumManager) ComponentManager
.get("org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager");
//register forum Entity prefixes for direct servlet request handling
if (accessProviderManager != null) {
accessProviderManager.registerProvider("forum_topic", this);
accessProviderManager.registerProvider("forum", this);
accessProviderManager.registerProvider("forum_message", this);
}
            //mark initialization of dependencies as complete
if (siteService != null && forumManager != null)
initComplete = true;
} catch (Exception e) {
e.printStackTrace();
}
}
public void handleAccess(HttpServletRequest req, HttpServletResponse res, EntityReference ref) {
//don't bother if the user is not logged in
if (req.getRemoteUser() == null) {
try {
String url = req.getRequestURL().toString();
String context = req.getContextPath();
String prefix = url.substring(0,url.lastIndexOf(context));
res.sendRedirect(prefix + "/authn/login?url="
+ URLEncoder.encode(req.getRequestURL().toString(), "UTF-8"));
return;
}
catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
catch (IOException e) {
e.printStackTrace();
}
}
String[] parts = req.getPathInfo().split("/");
        //ensure we have our dependencies and something resembling proper input
if (initComplete && parts.length > 2) {
String context = "";
if ("forum_topic".equals(parts[1]))
context = forumManager.getContextForTopicById(Long.valueOf(parts[2]));
else if ("forum".equals(parts[1]))
context = forumManager.getContextForForumById(Long.valueOf(parts[2]));
else if ("forum_message".equals(parts[1]))
context = forumManager.getContextForMessageById(Long.valueOf(parts[2]));
String placementId = "";
String target = "";
//Calculate the placement for the Entity... if you know of a better way, please
//replace this!
try {
if (siteService.getSite(context).getToolForCommonId("sakai.forums") != null) {
placementId = siteService.getSite(context)
.getToolForCommonId("sakai.forums").getId();
}
else {
placementId = siteService.getSite(context)
.getToolForCommonId("sakai.messagecenter").getId();
}
}
catch (IdUnusedException iue) {
iue.printStackTrace();
}
            //TODO: I've tried (and failed) a number of things here to get this to work
            //without a redirect. Setting all the placement and session state into the
            //threadlocal manager here came closest: it rendered the page, but links were
            //broken. Might be doable with more time or fresh eyes, though.
//direct the request to the proper 'direct' view with needed parameters
if ("forum_topic".equals(parts[1])) {
req.setAttribute("topicId", parts[2]);
target = "/jsp/discussionForum/message/dfAllMessagesDirect.jsf?topicId="
+ parts[2] + "&placementId=" + placementId;
}
else if ("forum".equals(parts[1])) {
target = "/jsp/discussionForum/forum/dfForumDirect.jsf?forumId="
+ parts[2] + "&placementId=" + placementId;
}
else if ("forum_message".equals(parts[1])) {
target = "/jsp/discussionForum/message/dfViewMessageDirect.jsf?messageId="
+ parts[2] + "&placementId=" + placementId + "&topicId="
+ forumManager.getMessageById(Long.valueOf(parts[2])).getTopic().getId()
+ "&forumId=" + forumManager.ForumIdForMessage(Long.valueOf(parts[2]));
}
// dispatch to the target
RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(target);
try {
dispatcher.forward(req, res);
}
catch (ServletException e) {
e.printStackTrace();
}
catch (IOException e) {
e.printStackTrace();
}
}
}
}
| MSGCNTR-546
git-svn-id: 65c85f73e8ce8bfb0e9a03896276dfceee826a06@95486 66ffb92e-73f9-0310-93c1-f5514f145a0a
| msgcntr/messageforums-app/src/java/org/sakaiproject/tool/messageforums/MessageForumsFilePickerServlet.java | MSGCNTR-546 |
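For reference, the helper dispatch in sendToHelper() above splits the servlet path info on "/" and derives the helper tool id from the first path segment. The sketch below mirrors that parsing in isolation; the sample path and the ".helper" extension value are assumptions for illustration and are not taken from the record above.

// Illustrative sketch only -- not part of the Sakai codebase. Mirrors the split/substring
// logic of sendToHelper(): parts[0] is "", parts[1] is the helper segment, and the helper
// id is everything before the trailing ".helper" extension (value assumed here).
public class HelperPathParseDemo {
    private static final String HELPER_EXT = ".helper"; // assumed value of the servlet's constant

    public static void main(String[] args) {
        String target = "/sakai.filepicker.helper/tool/attachments"; // hypothetical request path
        String[] parts = target.split("/");
        if (parts.length < 2 || !parts[1].endsWith(HELPER_EXT)) {
            System.out.println("not a helper request");
            return;
        }
        // parts[1] starts at index 1 of target, so the substring below works in target coordinates
        int posEnd = parts[1].lastIndexOf('.');
        String helperId = target.substring(1, posEnd + 1); // yields "sakai.filepicker"
        System.out.println("helper id: " + helperId);
    }
}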
|
Java | apache-2.0 | b9a08390739035ed70eb4dc50d1e98cd1e3f16f3 | 0 | ProgrammingLife2016/PL4-2016 | package application.factories;
import application.controllers.MainController;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.geometry.Rectangle2D;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ListView;
import javafx.scene.control.SelectionMode;
import javafx.scene.layout.VBox;
import javafx.scene.text.Text;
import javafx.stage.FileChooser;
import javafx.stage.Modality;
import javafx.stage.Screen;
import javafx.stage.Stage;
import org.apache.commons.io.FilenameUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Stack;
import java.util.stream.Collectors;
import static java.lang.String.format;
/**
* WindowFactory class.
*
* @version 1.0
* @since 25-04-2016
*/
public final class WindowFactory {
static Rectangle2D screenSize;
static Stage window;
static MainController mainController;
static Scene scene;
/**
* Private class constructor.
*/
private WindowFactory() {
}
/**
* Create method for windows.
*
* @param m parent of the window
* @return the constructed window.
*/
public static Stage createWindow(MainController m) {
mainController = m;
window = new Stage();
scene = createScene(m.getRoot());
screenSize = Screen.getPrimary().getVisualBounds();
window.setWidth(screenSize.getWidth());
window.setHeight(screenSize.getHeight());
window.setMaxHeight(screenSize.getHeight());
window.setScene(scene);
window.show();
return window;
}
/**
* Method to create the scene.
*
* @param parent parent object for the scene.
* @return the constructed scene.
*/
public static Scene createScene(Parent parent) {
Scene scene = new Scene(parent);
return scene;
}
/**
* Method that creates a directoryChooser.
*
* @return the directoryChooser.
*/
public static FileChooser createGraphChooser() {
FileChooser directoryChooser = new FileChooser();
directoryChooser.setTitle("Select Graph File");
Stack<String> mostRecentDir = mainController.getMostRecentDir();
if (!mostRecentDir.isEmpty()) {
File initialFile = new File(mostRecentDir.get(0));
if (initialFile != null && initialFile.isDirectory()) {
directoryChooser.setInitialDirectory(initialFile);
}
}
File selectedFile = directoryChooser.showOpenDialog(window);
if (selectedFile != null) {
mainController.addRecentGFA(selectedFile.getAbsolutePath());
File parentDir = selectedFile.getParentFile();
mainController.addRecentDir(parentDir.getAbsolutePath());
createGFApopup(parentDir, selectedFile);
}
return directoryChooser;
}
/**
* Method to create a pop-up when selecting a NWK File
*
* @param parentDir the current Directory
* @param selectedFile the selected File
*/
@SuppressFBWarnings
public static void createGFApopup(File parentDir, File selectedFile) {
ArrayList<Text> candidates = new ArrayList<>();
if (parentDir.isDirectory()) {
for (File f : parentDir.listFiles()) {
String ext = FilenameUtils.getExtension(f.getName());
if (ext.equals("nwk")) {
Text t = new Text(f.getAbsolutePath());
candidates.add(t);
}
}
}
mainController.setBackground("/background_images/loading.png");
if (!candidates.isEmpty()) {
showPopup(candidates, selectedFile, "NWK");
} else {
mainController.getGraphController().getGraph().getNodeMapFromFile(selectedFile.toString());
mainController.initGraph();
}
}
/**
* Method that creates a directoryChooser.
*
* @return the directoryChooser
*/
public static FileChooser createTreeChooser() {
FileChooser directoryChooser = new FileChooser();
directoryChooser.setTitle("Select Tree File");
Stack<String> mostRecentDir = mainController.getMostRecentDir();
if (!mostRecentDir.isEmpty()) {
File initialFile = new File(mostRecentDir.get(0));
if (initialFile != null && initialFile.isDirectory()) {
directoryChooser.setInitialDirectory(initialFile);
}
}
File selectedFile = directoryChooser.showOpenDialog(window);
if (selectedFile != null) {
File parentDir = selectedFile.getParentFile();
mainController.addRecentDir(parentDir.getAbsolutePath());
createNWKpopup(parentDir, selectedFile);
}
return directoryChooser;
}
/**
     * Method to create a pop-up when choosing a GFA File
*
* @param parentDir the current Directory we're at
* @param selectedFile the selected File
*/
@SuppressFBWarnings
public static void createNWKpopup(File parentDir, File selectedFile) {
ArrayList<Text> candidates = new ArrayList<>();
if (parentDir.isDirectory()) {
for (File f : parentDir.listFiles()) {
String ext = FilenameUtils.getExtension(f.getName());
if (ext.equals("gfa")) {
Text t = new Text(f.getAbsolutePath());
candidates.add(t);
}
}
}
mainController.setBackground("/background_images/loading.png");
if (!candidates.isEmpty()) {
showPopup(candidates, selectedFile, "GFA");
} else {
mainController.addRecentNWK(selectedFile.getAbsolutePath());
mainController.initTree(selectedFile.getAbsolutePath());
}
}
/**
* Method to show the created NWK pop-up
*
* @param candidates all candidates which can be loaded next
* @param selectedFile the currently selected GFA File
* @param type The type
*/
public static void showPopup(ArrayList<Text> candidates, File selectedFile, String type) {
Stage tempStage = new Stage();
ListView listView = new ListView();
fillList(listView, candidates);
handleTempStage(tempStage, type, listView);
if (type.toUpperCase().equals("NWK")) {
addGFAEventHandler(listView, selectedFile, tempStage);
} else if (type.toUpperCase().equals("GFA")) {
addNWKEventHandler(listView, selectedFile, tempStage);
}
}
/**
* Method to add the needed EventHandler to the List of Files
*
* @param listView the List of Files
     * @param selectedGFAFile the File which is selected
     * @param tempStage       the currently shown Stage
*/
public static void addGFAEventHandler(ListView listView, File selectedGFAFile, Stage tempStage) {
listView.setOnMouseClicked(event -> {
Text file = (Text) listView.getSelectionModel().getSelectedItem();
File nwk = new File(file.getText());
tempStage.hide();
if (!mainController.isMetaDataLoaded()) {
createMetaDatapopup(selectedGFAFile, nwk);
} else {
mainController.addRecentGFA(selectedGFAFile.getAbsolutePath());
mainController.getGraphController().getGraph().getNodeMapFromFile(selectedGFAFile.getAbsolutePath());
mainController.initGraph();
mainController.addRecentNWK(nwk.getAbsolutePath());
mainController.initTree(nwk.getAbsolutePath());
createMenuWithSearch();
}
});
}
/**
* Method to create the MetaData Pop-up
*
* @param gfaFile the earlier chosen GFA-File
* @param nwkFile the earlier chosen NWK-File
*/
public static void createMetaDatapopup(File gfaFile, File nwkFile) {
ArrayList<Text> candidates = new ArrayList<>();
File parentDir = gfaFile.getParentFile();
if (parentDir.isDirectory()) {
File[] fileList = parentDir.listFiles();
if (fileList != null) {
for (File f : fileList) {
String ext = FilenameUtils.getExtension(f.getName());
if (ext.equals("xlsx")) {
Text t = new Text(f.getAbsolutePath());
candidates.add(t);
}
}
}
}
File[] files = new File[4];
files[0] = gfaFile;
files[1] = nwkFile;
mainController.setBackground("/background_images/loading.png");
if (!candidates.isEmpty()) {
showMetaAnnoPopup(candidates, files, "metaData");
} else {
mainController.getGraphController().getGraph().getNodeMapFromFile(gfaFile.toString());
mainController.initGraph();
mainController.addRecentNWK(nwkFile.getAbsolutePath());
mainController.initTree(nwkFile.getAbsolutePath());
}
}
/**
* Method to create a PopUp when no Annotation Data is loaded
*
* @param files the currently chosen files
*/
public static void createAnnotationPopup(File[] files) {
ArrayList<Text> candidates = new ArrayList<>();
File gfaFile = files[0];
File parentDir = gfaFile.getParentFile();
if (parentDir.isDirectory()) {
File[] fileList = parentDir.listFiles();
if (fileList != null) {
for (File f : fileList) {
String ext = FilenameUtils.getExtension(f.getName());
if (ext.equals("gff")) {
Text t = new Text(f.getAbsolutePath());
candidates.add(t);
}
}
}
}
if (!candidates.isEmpty()) {
showMetaAnnoPopup(candidates, files, "gff");
} else {
chooseCorrectFile(files);
}
}
/**
* Method to create and show the MetaData Pop-up
*
* @param candidates all candidates that can be opened
* @param files the list of selected Files
* @param type the type
*/
public static void showMetaAnnoPopup(ArrayList<Text> candidates, File[] files, String type) {
Stage tempStage = new Stage();
ListView listView = new ListView();
fillList(listView, candidates);
handleTempStage(tempStage, type, listView);
if (type.equals("metaData")) {
addMetaDataEventHandler(listView, files, tempStage);
}
if (type.equals("gff")) {
addGFFEventHandler(listView, files, tempStage);
}
}
private static void fillList(ListView listView, ArrayList<Text> candidates) {
ObservableList<Text> list = FXCollections.observableArrayList();
listView.getStylesheets().add("/css/popup.css");
listView.setMinWidth(450);
listView.setPrefWidth(450);
listView.getSelectionModel().setSelectionMode(SelectionMode.SINGLE);
for (Text t : candidates) {
t.setWrappingWidth(listView.getPrefWidth());
t.getStyleClass().add("text");
}
list.addAll(candidates.stream().collect(Collectors.toList()));
listView.setItems(list);
}
/**
* Method to create the popup
*
* @param tempStage the Stage to show it on
* @param type the type of File we want to load
* @param listView the ListView to add the information to
*/
private static void handleTempStage(Stage tempStage, String type, ListView listView) {
Text text = new Text("Do you also want to load one of the following files? If not, exit.");
text.setWrappingWidth(listView.getPrefWidth());
text.getStyleClass().add("title");
VBox vBox = new VBox();
vBox.getStylesheets().add("/css/popup.css");
vBox.getStyleClass().add("vBox");
vBox.getChildren().addAll(text, listView);
Scene tempScene = new Scene(vBox);
tempStage.setScene(tempScene);
tempStage.initModality(Modality.APPLICATION_MODAL);
tempStage.setTitle(format("Load additional %s file", type));
tempStage.setResizable(false);
tempStage.show();
}
/**
* Method to add the needed EventHandler to the List of Files
*
* @param listView the List of Files
     * @param selectedFile the File which is selected
     * @param tempStage    the currently shown Stage
*/
public static void addNWKEventHandler(ListView listView, File selectedFile, Stage tempStage) {
listView.setOnMouseClicked(event -> {
Text file = (Text) listView.getSelectionModel().getSelectedItem();
File nwk = new File(file.getText());
tempStage.hide();
if (!mainController.isMetaDataLoaded()) {
createMetaDatapopup(selectedFile, nwk);
} else {
mainController.getGraphController().getGraph().getNodeMapFromFile(nwk.getAbsolutePath());
mainController.initGraph();
mainController.addRecentNWK(selectedFile.getAbsolutePath());
mainController.initTree(selectedFile.getAbsolutePath());
mainController.addRecentGFA(nwk.getAbsolutePath());
createMenuWithSearchWithoutAnnotation();
}
});
}
/**
     * Method to add EventHandlers to the MetaData Pop-up
*
* @param listView the listView showing
* @param files the list of chosen Files
* @param tempStage the Stage of the shown window
*/
public static void addMetaDataEventHandler(ListView listView, File[] files, Stage tempStage) {
listView.setOnMouseClicked(event -> {
Text file = (Text) listView.getSelectionModel().getSelectedItem();
File meta = new File(file.getText());
tempStage.hide();
files[2] = meta;
createAnnotationPopup(files);
});
}
/**
* Method to add an Event Handler to the GFF (Annotation) Pop Up
*
* @param listView the listView showing
* @param files the list of chosen Files
* @param tempStage the Stage of the shown window
*/
public static void addGFFEventHandler(ListView listView, File[] files, Stage tempStage) {
listView.setOnMouseClicked(event -> {
Text file = (Text) listView.getSelectionModel().getSelectedItem();
File annotation = new File(file.getText());
files[3] = annotation;
tempStage.hide();
if (files[0] != null && files[1] != null && files[2] != null) {
chooseCorrectFile(files);
}
});
tempStage.setOnCloseRequest(event -> chooseCorrectFile(files));
}
/**
* Method to make sure we load the file with the right method
*
* @param files the files to load
*/
public static void chooseCorrectFile(File[] files) {
File gfaFile = files[0];
File nwkFile = files[1];
File metaFile = files[2];
File annoFile = files[3];
if (FilenameUtils.getExtension(gfaFile.getName()).equals("nwk")) {
mainController.getGraphController().getGraph().getNodeMapFromFile(nwkFile.getAbsolutePath());
mainController.initGraph();
mainController.addRecentGFA(nwkFile.getAbsolutePath());
mainController.initMetadata(metaFile.getAbsolutePath());
mainController.initTree(gfaFile.getAbsolutePath());
mainController.addRecentNWK(gfaFile.getAbsolutePath());
if (annoFile != null) {
mainController.initAnnotations(annoFile.getAbsolutePath());
}
createMenuWithSearch();
} else {
mainController.getGraphController().getGraph().getNodeMapFromFile(gfaFile.getAbsolutePath());
mainController.initGraph();
mainController.addRecentGFA(gfaFile.getAbsolutePath());
mainController.initMetadata(metaFile.getAbsolutePath());
mainController.initTree(nwkFile.getAbsolutePath());
mainController.addRecentNWK(nwkFile.getAbsolutePath());
if (annoFile != null) {
mainController.initAnnotations(annoFile.getAbsolutePath());
}
createMenuWithSearch();
}
}
/**
* Method that creates a directoryChooser.
*
* @param s the title of the directoryChooser
* @return the directoryChooser
*/
public static FileChooser createAnnotationChooser(String s) {
FileChooser directoryChooser = new FileChooser();
directoryChooser.setTitle(s);
Stack<String> mostRecentDir = mainController.getMostRecentDir();
if (!mostRecentDir.isEmpty()) {
File initialFile = new File(mostRecentDir.get(0));
if (initialFile != null && initialFile.isDirectory()) {
directoryChooser.setInitialDirectory(initialFile);
}
}
File selectedFile = directoryChooser.showOpenDialog(window);
if (selectedFile != null) {
mainController.initAnnotations(selectedFile.getAbsolutePath());
mainController.addRecentGFF(selectedFile.getAbsolutePath());
}
return directoryChooser;
}
/**
     * Method to create an alert
* Shown when no strains are selected but we still want to see the graph
*/
public static void createAlert() {
final Stage dialog = new Stage();
dialog.initModality(Modality.APPLICATION_MODAL);
dialog.setResizable(false);
dialog.setTitle("Not enough strains selected");
VBox content = new VBox();
addAlertComponents(content, dialog, "Please select two or more strains first.");
Scene dialogScene = new Scene(content, 200, 100);
dialog.setScene(dialogScene);
dialog.show();
}
/**
     * Method to create an alert
     * Shown when the searched annotation cannot be found
*/
public static void createAnnNotFoundAlert() {
final Stage dialog = new Stage();
dialog.initModality(Modality.APPLICATION_MODAL);
dialog.setResizable(false);
dialog.setTitle("Annotation not found.");
VBox content = new VBox();
addAlertComponents(content, dialog, "Your search does not match any of the present annotations.");
Scene dialogScene = new Scene(content, 200, 100);
dialog.setScene(dialogScene);
dialog.show();
}
/**
* Method to add the components to the alert pop up
*
* @param content the content to be set
* @param dialog the dialog to add the content to
* @param textToAdd The text to be displayed in the popup
*/
public static void addAlertComponents(VBox content, Stage dialog, String textToAdd) {
content.getStylesheets().add("/css/popup.css");
content.getStyleClass().add("vBox");
content.setAlignment(Pos.CENTER);
content.setPadding(new Insets(10, 10, 10, 10));
content.setSpacing(10);
Text text = new Text(textToAdd);
text.getStyleClass().add("text");
Button ok = new Button();
ok.getStyleClass().add("button");
ok.setText("OK");
ok.setOnMouseClicked(event -> dialog.hide());
content.getChildren().addAll(text, ok);
}
/**
* Creates the menu including a search bar with the annotations search box.
*/
public static void createMenuWithSearch() {
mainController.createMenu(true, true);
}
/**
* Creates the menu including a search bar without the annotations search box.
*/
public static void createMenuWithSearchWithoutAnnotation() {
mainController.createMenu(true, false);
}
} | src/main/java/application/factories/WindowFactory.java | package application.factories;
import application.controllers.MainController;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.geometry.Rectangle2D;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ListView;
import javafx.scene.control.SelectionMode;
import javafx.scene.layout.VBox;
import javafx.scene.text.Text;
import javafx.stage.FileChooser;
import javafx.stage.Modality;
import javafx.stage.Screen;
import javafx.stage.Stage;
import org.apache.commons.io.FilenameUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Stack;
import java.util.stream.Collectors;
import static java.lang.String.format;
/**
* WindowFactory class.
*
* @version 1.0
* @since 25-04-2016
*/
public final class WindowFactory {
static Rectangle2D screenSize;
static Stage window;
static MainController mainController;
static Scene scene;
/**
* Private class constructor.
*/
private WindowFactory() {
}
/**
* Create method for windows.
*
* @param m parent of the window
* @return the constructed window.
*/
public static Stage createWindow(MainController m) {
mainController = m;
window = new Stage();
scene = createScene(m.getRoot());
screenSize = Screen.getPrimary().getVisualBounds();
window.setWidth(screenSize.getWidth());
window.setHeight(screenSize.getHeight());
window.setMaxHeight(screenSize.getHeight());
window.setScene(scene);
window.show();
return window;
}
/**
* Method to create the scene.
*
* @param parent parent object for the scene.
* @return the constructed scene.
*/
public static Scene createScene(Parent parent) {
Scene scene = new Scene(parent);
return scene;
}
/**
* Method that creates a directoryChooser.
*
* @return the directoryChooser.
*/
public static FileChooser createGraphChooser() {
FileChooser directoryChooser = new FileChooser();
directoryChooser.setTitle("Select Graph File");
Stack<String> mostRecentDir = mainController.getMostRecentDir();
if (!mostRecentDir.isEmpty()) {
File initialFile = new File(mostRecentDir.get(0));
if (initialFile != null && initialFile.isDirectory()) {
directoryChooser.setInitialDirectory(initialFile);
}
}
File selectedFile = directoryChooser.showOpenDialog(window);
mainController.addRecentGFA(selectedFile.getAbsolutePath());
File parentDir = selectedFile.getParentFile();
mainController.addRecentDir(parentDir.getAbsolutePath());
createGFApopup(parentDir, selectedFile);
return directoryChooser;
}
/**
* Method to create a pop-up when selecting a NWK File
*
* @param parentDir the current Directory
* @param selectedFile the selected File
*/
@SuppressFBWarnings
public static void createGFApopup(File parentDir, File selectedFile) {
ArrayList<Text> candidates = new ArrayList<>();
if (parentDir.isDirectory()) {
for (File f : parentDir.listFiles()) {
String ext = FilenameUtils.getExtension(f.getName());
if (ext.equals("nwk")) {
Text t = new Text(f.getAbsolutePath());
candidates.add(t);
}
}
}
mainController.setBackground("/background_images/loading.png");
if (!candidates.isEmpty()) {
showPopup(candidates, selectedFile, "NWK");
} else {
mainController.getGraphController().getGraph().getNodeMapFromFile(selectedFile.toString());
mainController.initGraph();
}
}
/**
* Method that creates a directoryChooser.
*
* @return the directoryChooser
*/
public static FileChooser createTreeChooser() {
FileChooser directoryChooser = new FileChooser();
directoryChooser.setTitle("Select Tree File");
Stack<String> mostRecentDir = mainController.getMostRecentDir();
if (!mostRecentDir.isEmpty()) {
File initialFile = new File(mostRecentDir.get(0));
if (initialFile != null && initialFile.isDirectory()) {
directoryChooser.setInitialDirectory(initialFile);
}
}
File selectedFile = directoryChooser.showOpenDialog(window);
File parentDir = selectedFile.getParentFile();
mainController.addRecentDir(parentDir.getAbsolutePath());
createNWKpopup(parentDir, selectedFile);
return directoryChooser;
}
/**
     * Method to create a pop-up when choosing a GFA File
*
* @param parentDir the current Directory we're at
* @param selectedFile the selected File
*/
@SuppressFBWarnings
public static void createNWKpopup(File parentDir, File selectedFile) {
ArrayList<Text> candidates = new ArrayList<>();
if (parentDir.isDirectory()) {
for (File f : parentDir.listFiles()) {
String ext = FilenameUtils.getExtension(f.getName());
if (ext.equals("gfa")) {
Text t = new Text(f.getAbsolutePath());
candidates.add(t);
}
}
}
mainController.setBackground("/background_images/loading.png");
if (!candidates.isEmpty()) {
showPopup(candidates, selectedFile, "GFA");
} else {
mainController.addRecentNWK(selectedFile.getAbsolutePath());
mainController.initTree(selectedFile.getAbsolutePath());
}
}
/**
* Method to show the created NWK pop-up
*
* @param candidates all candidates which can be loaded next
* @param selectedFile the currently selected GFA File
* @param type The type
*/
public static void showPopup(ArrayList<Text> candidates, File selectedFile, String type) {
Stage tempStage = new Stage();
ListView listView = new ListView();
fillList(listView, candidates);
handleTempStage(tempStage, type, listView);
if (type.toUpperCase().equals("NWK")) {
addGFAEventHandler(listView, selectedFile, tempStage);
} else if (type.toUpperCase().equals("GFA")) {
addNWKEventHandler(listView, selectedFile, tempStage);
}
}
/**
* Method to add the needed EventHandler to the List of Files
*
* @param listView the List of Files
     * @param selectedGFAFile the File which is selected
     * @param tempStage       the currently shown Stage
*/
public static void addGFAEventHandler(ListView listView, File selectedGFAFile, Stage tempStage) {
listView.setOnMouseClicked(event -> {
Text file = (Text) listView.getSelectionModel().getSelectedItem();
File nwk = new File(file.getText());
tempStage.hide();
if (!mainController.isMetaDataLoaded()) {
createMetaDatapopup(selectedGFAFile, nwk);
} else {
mainController.addRecentGFA(selectedGFAFile.getAbsolutePath());
mainController.getGraphController().getGraph().getNodeMapFromFile(selectedGFAFile.getAbsolutePath());
mainController.initGraph();
mainController.addRecentNWK(nwk.getAbsolutePath());
mainController.initTree(nwk.getAbsolutePath());
createMenuWithSearch();
}
});
}
/**
* Method to create the MetaData Pop-up
*
* @param gfaFile the earlier chosen GFA-File
* @param nwkFile the earlier chosen NWK-File
*/
public static void createMetaDatapopup(File gfaFile, File nwkFile) {
ArrayList<Text> candidates = new ArrayList<>();
File parentDir = gfaFile.getParentFile();
if (parentDir.isDirectory()) {
File[] fileList = parentDir.listFiles();
if (fileList != null) {
for (File f : fileList) {
String ext = FilenameUtils.getExtension(f.getName());
if (ext.equals("xlsx")) {
Text t = new Text(f.getAbsolutePath());
candidates.add(t);
}
}
}
}
File[] files = new File[4];
files[0] = gfaFile;
files[1] = nwkFile;
mainController.setBackground("/background_images/loading.png");
if (!candidates.isEmpty()) {
showMetaAnnoPopup(candidates, files, "metaData");
} else {
mainController.getGraphController().getGraph().getNodeMapFromFile(gfaFile.toString());
mainController.initGraph();
mainController.addRecentNWK(nwkFile.getAbsolutePath());
mainController.initTree(nwkFile.getAbsolutePath());
}
}
/**
* Method to create a PopUp when no Annotation Data is loaded
* @param files the currently chosen files
*/
public static void createAnnotationPopup(File[] files) {
ArrayList<Text> candidates = new ArrayList<>();
File gfaFile = files[0];
File parentDir = gfaFile.getParentFile();
if (parentDir.isDirectory()) {
File[] fileList = parentDir.listFiles();
if (fileList != null) {
for (File f : fileList) {
String ext = FilenameUtils.getExtension(f.getName());
if (ext.equals("gff")) {
Text t = new Text(f.getAbsolutePath());
candidates.add(t);
}
}
}
}
if (!candidates.isEmpty()) {
showMetaAnnoPopup(candidates, files, "gff");
} else {
chooseCorrectFile(files);
}
}
/**
* Method to create and show the MetaData Pop-up
*
* @param candidates all candidates that can be opened
* @param files the list of selected Files
* @param type the type
*/
public static void showMetaAnnoPopup(ArrayList<Text> candidates, File[] files, String type) {
Stage tempStage = new Stage();
ListView listView = new ListView();
fillList(listView, candidates);
handleTempStage(tempStage, type, listView);
if (type.equals("metaData")) {
addMetaDataEventHandler(listView, files, tempStage);
}
if (type.equals("gff")) {
addGFFEventHandler(listView, files, tempStage);
}
}
private static void fillList(ListView listView, ArrayList<Text> candidates) {
ObservableList<Text> list = FXCollections.observableArrayList();
listView.getStylesheets().add("/css/popup.css");
listView.setMinWidth(450);
listView.setPrefWidth(450);
listView.getSelectionModel().setSelectionMode(SelectionMode.SINGLE);
for (Text t : candidates) {
t.setWrappingWidth(listView.getPrefWidth());
t.getStyleClass().add("text");
}
list.addAll(candidates.stream().collect(Collectors.toList()));
listView.setItems(list);
}
/**
* Method to create the popup
*
* @param tempStage the Stage to show it on
* @param type the type of File we want to load
* @param listView the ListView to add the information to
*/
private static void handleTempStage(Stage tempStage, String type, ListView listView) {
Text text = new Text("Do you also want to load one of the following files? If not, exit.");
text.setWrappingWidth(listView.getPrefWidth());
text.getStyleClass().add("title");
VBox vBox = new VBox();
vBox.getStylesheets().add("/css/popup.css");
vBox.getStyleClass().add("vBox");
vBox.getChildren().addAll(text, listView);
Scene tempScene = new Scene(vBox);
tempStage.setScene(tempScene);
tempStage.initModality(Modality.APPLICATION_MODAL);
tempStage.setTitle(format("Load additional %s file", type));
tempStage.setResizable(false);
tempStage.show();
}
/**
* Method to add the needed EventHandler to the List of Files
*
* @param listView the List of Files
     * @param selectedFile the File which is selected
     * @param tempStage    the currently shown Stage
*/
public static void addNWKEventHandler(ListView listView, File selectedFile, Stage tempStage) {
listView.setOnMouseClicked(event -> {
Text file = (Text) listView.getSelectionModel().getSelectedItem();
File nwk = new File(file.getText());
tempStage.hide();
if (!mainController.isMetaDataLoaded()) {
createMetaDatapopup(selectedFile, nwk);
} else {
mainController.getGraphController().getGraph().getNodeMapFromFile(nwk.getAbsolutePath());
mainController.initGraph();
mainController.addRecentNWK(selectedFile.getAbsolutePath());
mainController.initTree(selectedFile.getAbsolutePath());
mainController.addRecentGFA(nwk.getAbsolutePath());
createMenuWithSearchWithoutAnnotation();
}
});
}
/**
     * Method to add EventHandlers to the MetaData Pop-up
*
* @param listView the listView showing
* @param files the list of chosen Files
* @param tempStage the Stage of the shown window
*/
public static void addMetaDataEventHandler(ListView listView, File[] files, Stage tempStage) {
listView.setOnMouseClicked(event -> {
Text file = (Text) listView.getSelectionModel().getSelectedItem();
File meta = new File(file.getText());
tempStage.hide();
files[2] = meta;
createAnnotationPopup(files);
});
}
/**
* Method to add an Event Handler to the GFF (Annotation) Pop Up
* @param listView the listView showing
* @param files the list of chosen Files
* @param tempStage the Stage of the shown window
*/
public static void addGFFEventHandler(ListView listView, File[] files, Stage tempStage) {
listView.setOnMouseClicked(event -> {
Text file = (Text) listView.getSelectionModel().getSelectedItem();
File annotation = new File(file.getText());
files[3] = annotation;
tempStage.hide();
if (files[0] != null && files[1] != null && files[2] != null) {
chooseCorrectFile(files);
}
});
tempStage.setOnCloseRequest(event -> chooseCorrectFile(files));
}
/**
* Method to make sure we load the file with the right method
* @param files the files to load
*/
public static void chooseCorrectFile(File[] files) {
File gfaFile = files[0];
File nwkFile = files[1];
File metaFile = files[2];
File annoFile = files[3];
if (FilenameUtils.getExtension(gfaFile.getName()).equals("nwk")) {
mainController.getGraphController().getGraph().getNodeMapFromFile(nwkFile.getAbsolutePath());
mainController.initGraph();
mainController.addRecentGFA(nwkFile.getAbsolutePath());
mainController.initMetadata(metaFile.getAbsolutePath());
mainController.initTree(gfaFile.getAbsolutePath());
mainController.addRecentNWK(gfaFile.getAbsolutePath());
if (annoFile != null) {
mainController.initAnnotations(annoFile.getAbsolutePath());
}
createMenuWithSearch();
} else {
mainController.getGraphController().getGraph().getNodeMapFromFile(gfaFile.getAbsolutePath());
mainController.initGraph();
mainController.addRecentGFA(gfaFile.getAbsolutePath());
mainController.initMetadata(metaFile.getAbsolutePath());
mainController.initTree(nwkFile.getAbsolutePath());
mainController.addRecentNWK(nwkFile.getAbsolutePath());
if (annoFile != null) {
mainController.initAnnotations(annoFile.getAbsolutePath());
}
createMenuWithSearch();
}
}
/**
* Method that creates a directoryChooser.
*
* @param s the title of the directoryChooser
* @return the directoryChooser
*/
public static FileChooser createAnnotationChooser(String s) {
FileChooser directoryChooser = new FileChooser();
directoryChooser.setTitle(s);
Stack<String> mostRecentDir = mainController.getMostRecentDir();
if (!mostRecentDir.isEmpty()) {
File initialFile = new File(mostRecentDir.get(0));
if (initialFile != null && initialFile.isDirectory()) {
directoryChooser.setInitialDirectory(initialFile);
}
}
File selectedFile = directoryChooser.showOpenDialog(window);
mainController.initAnnotations(selectedFile.getAbsolutePath());
mainController.addRecentGFF(selectedFile.getAbsolutePath());
return directoryChooser;
}
/**
     * Method to create an alert
* Shown when no strains are selected but we still want to see the graph
*/
public static void createAlert() {
final Stage dialog = new Stage();
dialog.initModality(Modality.APPLICATION_MODAL);
dialog.setResizable(false);
dialog.setTitle("Not enough strains selected");
VBox content = new VBox();
addAlertComponents(content, dialog, "Please select two or more strains first.");
Scene dialogScene = new Scene(content, 200, 100);
dialog.setScene(dialogScene);
dialog.show();
}
/**
     * Method to create an alert
     * Shown when the searched annotation cannot be found
*/
public static void createAnnNotFoundAlert() {
final Stage dialog = new Stage();
dialog.initModality(Modality.APPLICATION_MODAL);
dialog.setResizable(false);
dialog.setTitle("Annotation not found.");
VBox content = new VBox();
addAlertComponents(content, dialog, "Your search does not match any of the present annotations.");
Scene dialogScene = new Scene(content, 200, 100);
dialog.setScene(dialogScene);
dialog.show();
}
/**
* Method to add the components to the alert pop up
*
* @param content the content to be set
* @param dialog the dialog to add the content to
* @param textToAdd The text to be displayed in the popup
*/
public static void addAlertComponents(VBox content, Stage dialog, String textToAdd) {
content.getStylesheets().add("/css/popup.css");
content.getStyleClass().add("vBox");
content.setAlignment(Pos.CENTER);
content.setPadding(new Insets(10, 10, 10, 10));
content.setSpacing(10);
Text text = new Text(textToAdd);
text.getStyleClass().add("text");
Button ok = new Button();
ok.getStyleClass().add("button");
ok.setText("OK");
ok.setOnMouseClicked(event -> dialog.hide());
content.getChildren().addAll(text, ok);
}
/**
* Creates the menu including a search bar with the annotations search box.
*/
public static void createMenuWithSearch() {
mainController.createMenu(true, true);
}
/**
* Creates the menu including a search bar without the annotations search box.
*/
public static void createMenuWithSearchWithoutAnnotation() {
mainController.createMenu(true, false);
}
} | Fix null pointer when pressing cancel
| src/main/java/application/factories/WindowFactory.java | Fix null pointer when pressing cancel |
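The diff above ("Fix null pointer when pressing cancel") wraps each use of the FileChooser result in a null check, since showOpenDialog returns null when the dialog is cancelled. A minimal sketch of that guard pattern, independent of the project's own classes (class and method names below are hypothetical), is:

// Minimal sketch of the null-guard pattern applied in the commit above. Only the
// FileChooser.showOpenDialog behaviour is taken as given: it returns null when the
// user presses Cancel, so the result must be checked before use.
import java.io.File;
import javafx.stage.FileChooser;
import javafx.stage.Window;

public class FileChooserNullCheckDemo {
    public static File pickFile(Window owner) {
        FileChooser chooser = new FileChooser();
        chooser.setTitle("Select Graph File");
        File selected = chooser.showOpenDialog(owner); // null if the user pressed Cancel
        if (selected != null) {
            System.out.println("chosen: " + selected.getAbsolutePath());
        }
        return selected; // callers must also handle null
    }
}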
|
Java | apache-2.0 | 7a973b099d6331de3979f9c20e15308570a737f2 | 0 | BigDataBoutique/elasticsearch-repository-swift | /*
* Copyright 2017 Wikimedia and BigData Boutique
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wikimedia.elasticsearch.swift.repositories.blobstore;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.DeleteResult;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.threadpool.ThreadPool;
import org.javaswift.joss.client.core.ContainerPaginationMap;
import org.javaswift.joss.exception.NotFoundException;
import org.javaswift.joss.model.Container;
import org.javaswift.joss.model.Directory;
import org.javaswift.joss.model.DirectoryOrObject;
import org.javaswift.joss.model.StoredObject;
import org.wikimedia.elasticsearch.swift.SwiftPerms;
import org.wikimedia.elasticsearch.swift.WithTimeout;
import org.wikimedia.elasticsearch.swift.repositories.SwiftRepository;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.NoSuchFileException;
import java.nio.file.FileAlreadyExistsException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
* Swift's implementation of the AbstractBlobContainer
*/
public class SwiftBlobContainer extends AbstractBlobContainer {
private static final Logger logger = LogManager.getLogger(SwiftBlobContainer.class);
// Our local swift blob store instance
private final SwiftBlobStore blobStore;
private final SwiftRepository repository;
// The root path for blobs. Used by buildKey to build full blob names
private final String keyPath;
private final boolean blobExistsCheckAllowed;
private final int retryIntervalS;
private final int shortOperationTimeoutS;
private final ExecutorService executor;
/**
* Constructor
* @param blobStore The blob store to use for operations
* @param path The BlobPath to find blobs in
*/
protected SwiftBlobContainer(SwiftBlobStore blobStore, BlobPath path) {
super(path);
this.blobStore = blobStore;
this.repository = blobStore.getRepository();
this.executor = repository != null ? repository.threadPool().executor(ThreadPool.Names.SNAPSHOT) : null;
String keyPath = path.buildAsString();
this.keyPath = keyPath.isEmpty() || keyPath.endsWith("/") ? keyPath : keyPath + "/";
boolean minimizeBlobExistsChecks = SwiftRepository.Swift.MINIMIZE_BLOB_EXISTS_CHECKS_SETTING.get(blobStore.getSettings());
this.blobExistsCheckAllowed = keyPath.isEmpty() || !minimizeBlobExistsChecks;
this.retryIntervalS = SwiftRepository.Swift.RETRY_INTERVAL_S.get(blobStore.getSettings());
this.shortOperationTimeoutS = SwiftRepository.Swift.SHORT_OPERATION_TIMEOUT_S.get(blobStore.getSettings());
}
/**
* Delete a blob. Straightforward.
* @param blobName A blob to delete
*/
@Override
public void deleteBlob(final String blobName) throws IOException {
if (executor == null) {
try {
internalDeleteBlob(blobName);
return;
}
catch (Exception e) {
throw new IOException(e);
}
}
Future<DeleteResult> task = executor.submit(() -> internalDeleteBlob(blobName));
repository.addDeletion(blobName, task);
}
private DeleteResult internalDeleteBlob(String blobName) throws InterruptedException, ExecutionException, TimeoutException {
return WithTimeout.retry(retryIntervalS, shortOperationTimeoutS, TimeUnit.SECONDS, () -> {
try {
return SwiftPerms.exec(() -> {
StoredObject object = blobStore.getContainer().getObject(buildKey(blobName));
long contentLength = object.getContentLength();
object.delete();
return new DeleteResult(1, contentLength);
});
}
catch (NotFoundException e) {
logger.warn("Blob [" + buildKey(blobName) + "] cannot be deleted", e);
throw e;
}
});
}
@Override
public DeleteResult delete() throws IOException {
Collection<StoredObject> containerObjects = SwiftPerms.exec(() -> {
Container container = blobStore.getContainer();
ContainerPaginationMap containerPaginationMap = new ContainerPaginationMap(container, keyPath, container.getMaxPageSize());
return containerPaginationMap.listAllItems();
});
DeleteResult results = DeleteResult.ZERO;
ArrayList<Exception> errors = new ArrayList<>();
for (StoredObject so: containerObjects) {
try {
long size = SwiftPerms.exec(so::getContentLength);
deleteBlob(so.getName().substring(keyPath.length())); //SwiftPerms checked internally
results = results.add(1, size);
} catch (Exception e) {
errors.add(e);
}
}
if (errors.isEmpty()) {
return results;
}
String message = errors.stream().map(Exception::getMessage).collect(Collectors.joining(","));
throw new IOException(message);
}
/**
* Get the blobs matching a given prefix
* @param blobNamePrefix The prefix to look for blobs with
* @return blobs metadata
*/
@Override
public Map<String, BlobMetaData> listBlobsByPrefix(@Nullable final String blobNamePrefix) throws IOException {
String directoryKey = blobNamePrefix == null ? keyPath : buildKey(blobNamePrefix);
try {
Collection<DirectoryOrObject> directoryList = SwiftPerms.exec(() ->
blobStore.getContainer().listDirectory(new Directory(directoryKey, '/'))
);
HashMap<String, PlainBlobMetaData> blobMap = new HashMap<>();
for (DirectoryOrObject obj: directoryList) {
if (obj.isObject()) {
String name = obj.getName().substring(keyPath.length());
Long length = SwiftPerms.exec(() -> obj.getAsObject().getContentLength());
PlainBlobMetaData meta = new PlainBlobMetaData(name, length);
blobMap.put(name, meta);
}
}
return Collections.unmodifiableMap(blobMap);
} catch (NotFoundException e) {
NoSuchFileException e2 = new NoSuchFileException("Cannot list blobs in directory [" + directoryKey + "]");
e2.initCause(e);
throw e2;
}
}
/**
* Get all the blobs
*/
@Override
public Map<String, BlobMetaData> listBlobs() throws IOException {
return listBlobsByPrefix(null);
}
@Override
public Map<String, BlobContainer> children() {
Collection<DirectoryOrObject> objects = SwiftPerms.exec(() -> blobStore.getContainer().listDirectory(new Directory(keyPath, '/')));
HashMap<String, BlobContainer> blobMap = new HashMap<>();
for (DirectoryOrObject obj: objects) {
if (obj.isDirectory()){
String name = obj.getBareName();
BlobContainer blobContainer = blobStore.blobContainer(new BlobPath().add(obj.getName()));
blobMap.put(name, blobContainer);
}
}
return Collections.unmodifiableMap(blobMap);
}
/**
* Build a key for a blob name, based on the keyPath
* @param blobName The blob name to build a key for
* @return the key
*/
private String buildKey(String blobName) {
return keyPath + blobName;
}
/**
* Fetch a given blob into a BufferedInputStream
* @param blobName The blob name to read
* @return a stream
*/
@Override
public InputStream readBlob(final String blobName) throws IOException {
try {
return WithTimeout.retry(retryIntervalS, shortOperationTimeoutS, TimeUnit.SECONDS, () -> {
try {
InputStream downloadStream = SwiftPerms.exec(() ->
blobStore.getContainer().getObject(buildKey(blobName)).downloadObjectAsInputStream()
);
return new BufferedInputStream(downloadStream, (int) blobStore.getBufferSizeInBytes());
}
catch (NotFoundException e) {
logger.warn("Blob object [" + buildKey(blobName) + "] cannot be read", e);
throw e;
}
});
}
catch(Exception e) {
throw new IOException(e);
}
}
@Override
public void writeBlob(final String blobName,
final InputStream in,
final long blobSize,
boolean failIfAlreadyExists) throws IOException {
if (executor == null) {
internalWriteBlob(blobName, in, failIfAlreadyExists);
return;
}
Future<Void> task = executor.submit(() -> {
internalWriteBlob(blobName, in, failIfAlreadyExists);
return null;
});
repository.addWrite(blobName, task);
}
private void internalWriteBlob(String blobName, InputStream in, boolean failIfAlreadyExists) throws IOException {
try {
SwiftPerms.execThrows(() -> {
StoredObject blob = blobStore.getContainer().getObject(buildKey(blobName));
if (failIfAlreadyExists && blobExistsCheckAllowed && blob.exists()) {
throw new FileAlreadyExistsException("blob [" + buildKey(blobName) + "] already exists, cannot overwrite");
}
blob.uploadObject(in);
});
}
catch (IOException | RuntimeException e) {
throw e;
}
catch (Exception e) {
throw new IOException(e);
}
}
@Override
public void writeBlobAtomic(String blobName,
InputStream inputStream,
long blobSize,
boolean failIfAlreadyExists) throws IOException {
writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists);
}
}
| src/main/java/org/wikimedia/elasticsearch/swift/repositories/blobstore/SwiftBlobContainer.java | /*
* Copyright 2017 Wikimedia and BigData Boutique
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wikimedia.elasticsearch.swift.repositories.blobstore;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.DeleteResult;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.threadpool.ThreadPool;
import org.javaswift.joss.client.core.ContainerPaginationMap;
import org.javaswift.joss.exception.NotFoundException;
import org.javaswift.joss.model.Container;
import org.javaswift.joss.model.Directory;
import org.javaswift.joss.model.DirectoryOrObject;
import org.javaswift.joss.model.StoredObject;
import org.wikimedia.elasticsearch.swift.SwiftPerms;
import org.wikimedia.elasticsearch.swift.repositories.SwiftRepository;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.NoSuchFileException;
import java.nio.file.FileAlreadyExistsException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
/**
* Swift's implementation of the AbstractBlobContainer
*/
public class SwiftBlobContainer extends AbstractBlobContainer {
// Our local swift blob store instance
private final SwiftBlobStore blobStore;
private final SwiftRepository repository;
// The root path for blobs. Used by buildKey to build full blob names
private final String keyPath;
private final boolean blobExistsCheckAllowed;
private final ExecutorService executor;
/**
* Constructor
* @param blobStore The blob store to use for operations
* @param path The BlobPath to find blobs in
*/
protected SwiftBlobContainer(SwiftBlobStore blobStore, BlobPath path) {
super(path);
this.blobStore = blobStore;
this.repository = blobStore.getRepository();
this.executor = repository != null ? repository.threadPool().executor(ThreadPool.Names.SNAPSHOT) : null;
String keyPath = path.buildAsString();
this.keyPath = keyPath.isEmpty() || keyPath.endsWith("/") ? keyPath : keyPath + "/";
boolean minimizeBlobExistsChecks = SwiftRepository.Swift.MINIMIZE_BLOB_EXISTS_CHECKS_SETTING.get(blobStore.getSettings());
this.blobExistsCheckAllowed = keyPath.isEmpty() || !minimizeBlobExistsChecks;
this.retryIntervalS = SwiftRepository.Swift.RETRY_INTERVAL_S.get(blobStore.getSettings());
this.shortOperationTimeoutS = SwiftRepository.Swift.SHORT_OPERATION_TIMEOUT_S.get(blobStore.getSettings());
}
/**
* Delete a blob. Straightforward.
* @param blobName A blob to delete
*/
@Override
public void deleteBlob(final String blobName) throws IOException {
if (executor == null) {
internalDeleteBlob(blobName);
return;
}
Future<DeleteResult> task = executor.submit(() -> internalDeleteBlob(blobName));
repository.addDeletion(blobName, task);
}
private DeleteResult internalDeleteBlob(String blobName) throws NoSuchFileException {
try {
return SwiftPerms.exec(() -> {
StoredObject object = blobStore.getContainer().getObject(buildKey(blobName));
long contentLength = object.getContentLength();
object.delete();
return new DeleteResult(1, contentLength);
});
} catch (NotFoundException e) {
NoSuchFileException e2 = new NoSuchFileException("Blob [" + buildKey(blobName) + "] cannot be deleted");
e2.initCause(e);
throw e2;
}
}
@Override
public DeleteResult delete() throws IOException {
Collection<StoredObject> containerObjects = SwiftPerms.exec(() -> {
Container container = blobStore.getContainer();
ContainerPaginationMap containerPaginationMap = new ContainerPaginationMap(container, keyPath, container.getMaxPageSize());
return containerPaginationMap.listAllItems();
});
DeleteResult results = DeleteResult.ZERO;
ArrayList<Exception> errors = new ArrayList<>();
for (StoredObject so: containerObjects) {
try {
long size = SwiftPerms.exec(so::getContentLength);
deleteBlob(so.getName().substring(keyPath.length())); //SwiftPerms checked internally
results = results.add(1, size);
} catch (Exception e) {
errors.add(e);
}
}
if (errors.isEmpty()) {
return results;
}
String message = errors.stream().map(Exception::getMessage).collect(Collectors.joining(","));
throw new IOException(message);
}
/**
* Get the blobs matching a given prefix
* @param blobNamePrefix The prefix to look for blobs with
* @return blobs metadata
*/
@Override
public Map<String, BlobMetaData> listBlobsByPrefix(@Nullable final String blobNamePrefix) throws IOException {
String directoryKey = blobNamePrefix == null ? keyPath : buildKey(blobNamePrefix);
try {
Collection<DirectoryOrObject> directoryList = SwiftPerms.exec(() ->
blobStore.getContainer().listDirectory(new Directory(directoryKey, '/'))
);
HashMap<String, PlainBlobMetaData> blobMap = new HashMap<>();
for (DirectoryOrObject obj: directoryList) {
if (obj.isObject()) {
String name = obj.getName().substring(keyPath.length());
Long length = SwiftPerms.exec(() -> obj.getAsObject().getContentLength());
PlainBlobMetaData meta = new PlainBlobMetaData(name, length);
blobMap.put(name, meta);
}
}
return Collections.unmodifiableMap(blobMap);
} catch (NotFoundException e) {
NoSuchFileException e2 = new NoSuchFileException("Cannot list blobs in directory [" + directoryKey + "]");
e2.initCause(e);
throw e2;
}
}
/**
* Get all the blobs
*/
@Override
public Map<String, BlobMetaData> listBlobs() throws IOException {
return listBlobsByPrefix(null);
}
@Override
public Map<String, BlobContainer> children() {
Collection<DirectoryOrObject> objects = SwiftPerms.exec(() -> blobStore.getContainer().listDirectory(new Directory(keyPath, '/')));
HashMap<String, BlobContainer> blobMap = new HashMap<>();
for (DirectoryOrObject obj: objects) {
if (obj.isDirectory()){
String name = obj.getBareName();
BlobContainer blobContainer = blobStore.blobContainer(new BlobPath().add(obj.getName()));
blobMap.put(name, blobContainer);
}
}
return Collections.unmodifiableMap(blobMap);
}
/**
* Build a key for a blob name, based on the keyPath
* @param blobName The blob name to build a key for
* @return the key
*/
private String buildKey(String blobName) {
return keyPath + blobName;
}
/**
* Fetch a given blob into a BufferedInputStream
* @param blobName The blob name to read
* @return a stream
*/
@Override
public InputStream readBlob(final String blobName) throws IOException {
try {
InputStream is = new BufferedInputStream(
SwiftPerms.exec(() -> blobStore.getContainer().getObject(buildKey(blobName)).downloadObjectAsInputStream()),
(int) blobStore.getBufferSizeInBytes());
return is;
} catch (NotFoundException e){
NoSuchFileException e2 = new NoSuchFileException("Blob object [" + buildKey(blobName) + "] cannot be read");
e2.initCause(e);
throw e2;
}
}
@Override
public void writeBlob(final String blobName,
final InputStream in,
final long blobSize,
boolean failIfAlreadyExists) throws IOException {
try {
SwiftPerms.execThrows(() -> {
StoredObject blob = blobStore.getContainer().getObject(buildKey(blobName));
if (failIfAlreadyExists && blobExistsCheckAllowed && blob.exists()) {
throw new FileAlreadyExistsException("blob [" + buildKey(blobName) + "] already exists, cannot overwrite");
}
blob.uploadObject(in);
});
} catch (IOException | RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IOException(e);
}
}
@Override
public void writeBlobAtomic(String blobName,
InputStream inputStream,
long blobSize,
boolean failIfAlreadyExists) throws IOException {
writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists);
}
}
| ULTI-419032 - Alex - added repeat and retries for deletes
| src/main/java/org/wikimedia/elasticsearch/swift/repositories/blobstore/SwiftBlobContainer.java | ULTI-419032 - Alex - added repeat and retries for deletes |
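The new version above routes deletes and reads through WithTimeout.retry(retryIntervalS, shortOperationTimeoutS, TimeUnit.SECONDS, ...). The plugin's WithTimeout class is not shown in this record, so the sketch below is only a generic stand-in for that kind of retry-until-deadline helper, not the actual implementation:

// Generic retry-until-deadline helper, written as an illustrative stand-in for the
// WithTimeout.retry(...) calls used above. All names and semantics here are assumptions;
// the real WithTimeout implementation is not part of this record.
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public final class RetryUntilDeadlineDemo {
    private RetryUntilDeadlineDemo() {
    }

    public static <T> T retry(long intervalS, long timeoutS, Callable<T> operation) throws Exception {
        long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutS);
        Exception last = null;
        while (System.nanoTime() < deadline) {
            try {
                return operation.call(); // success: stop retrying
            } catch (Exception e) {
                last = e; // remember the failure and pause before the next attempt
                TimeUnit.SECONDS.sleep(intervalS);
            }
        }
        throw new TimeoutException("operation did not succeed within " + timeoutS + "s"
                + (last != null ? "; last error: " + last.getMessage() : ""));
    }
}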
|
Java | apache-2.0 | f339780b884f3912cd2dbbc1c1d9855e74ac2ed2 | 0 | praminda/carbon-apimgt,isharac/carbon-apimgt,prasa7/carbon-apimgt,tharikaGitHub/carbon-apimgt,nuwand/carbon-apimgt,chamilaadhi/carbon-apimgt,tharikaGitHub/carbon-apimgt,tharikaGitHub/carbon-apimgt,Rajith90/carbon-apimgt,nuwand/carbon-apimgt,isharac/carbon-apimgt,nuwand/carbon-apimgt,ruks/carbon-apimgt,harsha89/carbon-apimgt,harsha89/carbon-apimgt,ruks/carbon-apimgt,chamindias/carbon-apimgt,tharindu1st/carbon-apimgt,Rajith90/carbon-apimgt,tharikaGitHub/carbon-apimgt,prasa7/carbon-apimgt,fazlan-nazeem/carbon-apimgt,isharac/carbon-apimgt,prasa7/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,nuwand/carbon-apimgt,prasa7/carbon-apimgt,chamilaadhi/carbon-apimgt,tharindu1st/carbon-apimgt,ruks/carbon-apimgt,jaadds/carbon-apimgt,tharindu1st/carbon-apimgt,Rajith90/carbon-apimgt,uvindra/carbon-apimgt,wso2/carbon-apimgt,uvindra/carbon-apimgt,fazlan-nazeem/carbon-apimgt,chamindias/carbon-apimgt,ruks/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,uvindra/carbon-apimgt,jaadds/carbon-apimgt,jaadds/carbon-apimgt,bhathiya/carbon-apimgt,chamindias/carbon-apimgt,pubudu538/carbon-apimgt,harsha89/carbon-apimgt,pubudu538/carbon-apimgt,harsha89/carbon-apimgt,chamindias/carbon-apimgt,chamilaadhi/carbon-apimgt,pubudu538/carbon-apimgt,praminda/carbon-apimgt,malinthaprasan/carbon-apimgt,fazlan-nazeem/carbon-apimgt,pubudu538/carbon-apimgt,wso2/carbon-apimgt,malinthaprasan/carbon-apimgt,tharindu1st/carbon-apimgt,wso2/carbon-apimgt,praminda/carbon-apimgt,jaadds/carbon-apimgt,isharac/carbon-apimgt,bhathiya/carbon-apimgt,malinthaprasan/carbon-apimgt,wso2/carbon-apimgt,fazlan-nazeem/carbon-apimgt,malinthaprasan/carbon-apimgt,bhathiya/carbon-apimgt,uvindra/carbon-apimgt,Rajith90/carbon-apimgt,bhathiya/carbon-apimgt,chamilaadhi/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt | /*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.impl;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.apimgt.api.APIConsumer;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException;
import org.wso2.carbon.apimgt.api.LoginPostExecutor;
import org.wso2.carbon.apimgt.api.NewPostLoginExecutor;
import org.wso2.carbon.apimgt.api.WorkflowResponse;
import org.wso2.carbon.apimgt.api.model.API;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.apimgt.api.model.APIKey;
import org.wso2.carbon.apimgt.api.model.APIRating;
import org.wso2.carbon.apimgt.api.model.AccessTokenInfo;
import org.wso2.carbon.apimgt.api.model.AccessTokenRequest;
import org.wso2.carbon.apimgt.api.model.Application;
import org.wso2.carbon.apimgt.api.model.ApplicationConstants;
import org.wso2.carbon.apimgt.api.model.ApplicationKeysDTO;
import org.wso2.carbon.apimgt.api.model.Documentation;
import org.wso2.carbon.apimgt.api.model.KeyManager;
import org.wso2.carbon.apimgt.api.model.OAuthAppRequest;
import org.wso2.carbon.apimgt.api.model.OAuthApplicationInfo;
import org.wso2.carbon.apimgt.api.model.Scope;
import org.wso2.carbon.apimgt.api.model.SubscribedAPI;
import org.wso2.carbon.apimgt.api.model.Subscriber;
import org.wso2.carbon.apimgt.api.model.SubscriptionResponse;
import org.wso2.carbon.apimgt.api.model.Tag;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.api.model.TierPermission;
import org.wso2.carbon.apimgt.impl.caching.CacheInvalidator;
import org.wso2.carbon.apimgt.impl.dto.ApplicationRegistrationWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.ApplicationWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.SubscriptionWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.TierPermissionDTO;
import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO;
import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader;
import org.wso2.carbon.apimgt.impl.utils.APINameComparator;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.impl.utils.APIVersionComparator;
import org.wso2.carbon.apimgt.impl.utils.ApplicationUtils;
import org.wso2.carbon.apimgt.impl.workflow.AbstractApplicationRegistrationWorkflowExecutor;
import org.wso2.carbon.apimgt.impl.workflow.GeneralWorkflowResponse;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowConstants;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowException;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutor;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutorFactory;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowStatus;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.generic.GenericArtifactManager;
import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact;
import org.wso2.carbon.governance.api.util.GovernanceUtils;
import org.wso2.carbon.registry.common.TermData;
import org.wso2.carbon.registry.core.ActionConstants;
import org.wso2.carbon.registry.core.Association;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.config.RegistryContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.pagination.PaginationContext;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.user.api.AuthorizationManager;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.mgt.stub.UserAdminStub;
import org.wso2.carbon.user.mgt.stub.UserAdminUserAdminException;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.cache.Caching;
import javax.wsdl.Definition;
/**
* This class provides the core API store functionality. It is implemented in a very
* self-contained and 'pure' manner, without taking requirements like security into account,
* which are subject to frequent change. Due to this 'pure' nature and the significance of
* the class to the overall API management functionality, the visibility of the class has
* been reduced to package level. This means we can still use it for internal purposes and
* possibly even extend it, but it's totally off the limits of the users. Users wishing to
* programmatically access this functionality should use one of the extensions of this
* class which is visible to them. These extensions may add additional features like
* security to this class.
*/
public class APIConsumerImpl extends AbstractAPIManager implements APIConsumer {
private static final Log log = LogFactory.getLog(APIConsumerImpl.class);
public static final char COLON_CHAR = ':';
public static final String EMPTY_STRING = "";
public static final String ENVIRONMENT_NAME = "environmentName";
public static final String ENVIRONMENT_TYPE = "environmentType";
public static final String API_NAME = "apiName";
public static final String API_VERSION = "apiVersion";
public static final String API_PROVIDER = "apiProvider";
/* Map to Store APIs against Tag */
private ConcurrentMap<String, Set<API>> taggedAPIs = new ConcurrentHashMap<String, Set<API>>();
private boolean isTenantModeStoreView;
private String requestedTenant;
private boolean isTagCacheEnabled;
private Set<Tag> tagSet;
private long tagCacheValidityTime;
private volatile long lastUpdatedTime;
private volatile long lastUpdatedTimeForTagApi;
private final Object tagCacheMutex = new Object();
private final Object tagWithAPICacheMutex = new Object();
protected APIMRegistryService apimRegistryService;
protected String userNameWithoutChange;
public APIConsumerImpl() throws APIManagementException {
super();
readTagCacheConfigs();
}
public APIConsumerImpl(String username, APIMRegistryService apimRegistryService) throws APIManagementException {
super(username);
userNameWithoutChange = username;
readTagCacheConfigs();
this.apimRegistryService = apimRegistryService;
}
private void readTagCacheConfigs() {
APIManagerConfiguration config = getAPIManagerConfiguration();
String enableTagCache = config.getFirstProperty(APIConstants.STORE_TAG_CACHE_DURATION);
if (enableTagCache == null) {
isTagCacheEnabled = false;
tagCacheValidityTime = 0;
} else {
isTagCacheEnabled = true;
tagCacheValidityTime = Long.parseLong(enableTagCache);
}
}
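    // Illustrative note (the value shown is hypothetical): if the STORE_TAG_CACHE_DURATION property
    // resolves to "120000", the tag cache is enabled with a validity window of 120 seconds
    // (the value is compared against System.currentTimeMillis() deltas); if the property is
    // absent, caching is disabled and tags are re-read from the registry on every request.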
@Override
public Subscriber getSubscriber(String subscriberId) throws APIManagementException {
Subscriber subscriber = null;
try {
subscriber = apiMgtDAO.getSubscriber(subscriberId);
} catch (APIManagementException e) {
handleException("Failed to get Subscriber", e);
}
return subscriber;
}
/**
* Returns the set of APIs with the given tag from the taggedAPIs Map
*
* @param tagName The name of the tag
* @return Set of {@link API} with the given tag
* @throws APIManagementException
*/
@Override
public Set<API> getAPIsWithTag(String tagName, String requestedTenantDomain) throws APIManagementException {
/* We keep track of the lastUpdatedTime of the TagCache to determine its freshness.
*/
long lastUpdatedTimeAtStart = lastUpdatedTimeForTagApi;
long currentTimeAtStart = System.currentTimeMillis();
if(isTagCacheEnabled && ( (currentTimeAtStart- lastUpdatedTimeAtStart) < tagCacheValidityTime)){
if (taggedAPIs != null && taggedAPIs.containsKey(tagName)) {
return taggedAPIs.get(tagName);
}
}else{
synchronized (tagWithAPICacheMutex) {
lastUpdatedTimeForTagApi = System.currentTimeMillis();
taggedAPIs = new ConcurrentHashMap<String, Set<API>>();
}
}
boolean isTenantMode = requestedTenantDomain != null && !"null".equalsIgnoreCase(requestedTenantDomain);
this.isTenantModeStoreView = isTenantMode;
if (requestedTenantDomain != null && !"null".equals(requestedTenantDomain)) {
this.requestedTenant = requestedTenantDomain;
}
Registry userRegistry;
boolean isTenantFlowStarted = false;
Set<API> apisWithTag = null;
try {
//start the tenant flow prior to loading registry
if (requestedTenant != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(requestedTenant)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(requestedTenantDomain);
}
if ((isTenantMode && this.tenantDomain == null) ||
(isTenantMode && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(requestedTenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
apisWithTag = getAPIsWithTag(userRegistry, tagName);
/* Add the APIs against the tag name */
if (!apisWithTag.isEmpty()) {
if (taggedAPIs.containsKey(tagName)) {
for (API api : apisWithTag) {
taggedAPIs.get(tagName).add(api);
}
} else {
taggedAPIs.putIfAbsent(tagName, apisWithTag);
}
}
} catch (RegistryException e) {
handleException("Failed to get api by the tag", e);
} catch (UserStoreException e) {
handleException("Failed to get api by the tag", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
return apisWithTag;
}
protected void setUsernameToThreadLocalCarbonContext(String username) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(username);
}
protected UserRegistry getGovernanceUserRegistry(int tenantId) throws RegistryException {
return ServiceReferenceHolder.getInstance().getRegistryService().
getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
}
protected int getTenantId(String requestedTenantDomain) throws UserStoreException {
return ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain);
}
/**
* Returns the set of APIs with the given tag from the taggedAPIs Map.
*
* @param tag The name of the tag
     * @param start The starting index within the full tagged-API result set
     * @param end The number of APIs to return from the starting index (acts as a page size)
     * @param tenantDomain The tenant domain of the store being browsed
* @return A {@link Map} of APIs(between the given indexes) and the total number indicating all the available
* APIs count
* @throws APIManagementException
*/
@Override
public Map<String, Object> getPaginatedAPIsWithTag(String tag, int start, int end, String tenantDomain) throws APIManagementException {
List<API> apiList = new ArrayList<API>();
Set<API> resultSet = new TreeSet<API>(new APIVersionComparator());
Map<String, Object> results = new HashMap<String, Object>();
Set<API> taggedAPISet = this.getAPIsWithTag(tag,tenantDomain);
if (taggedAPISet != null) {
if (taggedAPISet.size() < end) {
end = taggedAPISet.size();
}
int totalLength;
apiList.addAll(taggedAPISet);
totalLength = apiList.size();
if (totalLength <= ((start + end) - 1)) {
end = totalLength;
} else {
end = start + end;
}
for (int i = start; i < end; i++) {
resultSet.add(apiList.get(i));
}
results.put("apis", resultSet);
results.put("length", taggedAPISet.size());
} else {
results.put("apis", null);
results.put("length", 0);
}
return results;
}
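    // Worked example (illustrative): with 25 APIs carrying the tag, a call with start=0 and
    // end=10 returns APIs 0-9 and reports "length" = 25; a follow-up call with start=10 and
    // end=10 returns APIs 10-19. The 'end' argument therefore acts as a page size rather than
    // an absolute index.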
/**
* Returns the set of APIs with the given tag, retrieved from registry
*
* @param registry - Current registry; tenant/SuperTenant
* @param tag - The tag name
* @return A {@link Set} of {@link API} objects.
* @throws APIManagementException
*/
private Set<API> getAPIsWithTag(Registry registry, String tag)
throws APIManagementException {
Set<API> apiSet = new TreeSet<API>(new APINameComparator());
try {
List<GovernanceArtifact> genericArtifacts =
GovernanceUtils.findGovernanceArtifacts(getSearchQuery(APIConstants.TAG_SEARCH_TYPE_PREFIX2 + tag), registry,
APIConstants.API_RXT_MEDIA_TYPE);
for (GovernanceArtifact genericArtifact : genericArtifacts) {
try {
String apiStatus = APIUtil.getLcStateFromArtifact(genericArtifact);
if (genericArtifact != null && (APIConstants.PUBLISHED.equals(apiStatus)
|| APIConstants.PROTOTYPED.equals(apiStatus))) {
API api = APIUtil.getAPI(genericArtifact);
if (api != null) {
apiSet.add(api);
}
}
} catch (RegistryException e) {
log.warn("User is not authorized to get an API with tag " + tag, e);
}
}
} catch (RegistryException e) {
handleException("Failed to get API for tag " + tag, e);
}
return apiSet;
}
/**
* The method to get APIs to Store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Override
public Set<API> getAllPublishedAPIs(String tenantDomain) throws APIManagementException {
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
userRegistry = getGovernanceUserRegistry(tenantId);
} else {
userRegistry = registry;
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
if (genericArtifacts == null || genericArtifacts.length == 0) {
return apiSortedSet;
}
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
for (GenericArtifact artifact : genericArtifacts) {
// adding the API provider can mark the latest API .
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
try {
checkAccessControlPermission(api.getId());
} catch (APIManagementException e) {
// This is a second level of filter to get apis based on access control and visibility.
// Hence log is set as debug and continued.
if(log.isDebugEnabled()) {
log.debug("User is not authorized to view the api " + api.getId().getApiName(), e);
}
continue;
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
return apiSortedSet;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
return apiVersionsSortedSet;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving APIs for store. User : " + PrivilegedCarbonContext
.getThreadLocalCarbonContext().getUsername();
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
}
return apiSortedSet;
}
/**
     * The method to get paginated published APIs for the Store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Override
@Deprecated
public Map<String,Object> getAllPaginatedPublishedAPIs(String tenantDomain,int start,int end)
throws APIManagementException {
Boolean displayAPIsWithMultipleStatus = false;
try {
if (tenantDomain != null) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
}finally {
endTenantFlow();
}
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
//Create the search attribute map
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
} else{
return getAllPaginatedAPIs(tenantDomain, start, end);
}
Map<String, Object> result = new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength = 0;
try {
Registry userRegistry;
boolean isTenantMode = (tenantDomain != null);
if ((isTenantMode && this.tenantDomain == null) ||
(isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting paginated published API.");
continue;
}
// adding the API provider can mark the latest API .
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis", apiVersionsSortedSet);
result.put("totalLength", totalLength);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all Published APIs.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
/**
* Regenerate consumer secret.
*
     * @param clientId The consumer key for which the consumer secret should be regenerated.
* @return New consumer secret.
* @throws APIManagementException This is the custom exception class for API management.
*/
public String renewConsumerSecret(String clientId) throws APIManagementException {
// Create Token Request with parameters provided from UI.
AccessTokenRequest tokenRequest = new AccessTokenRequest();
tokenRequest.setClientId(clientId);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
return keyManager.getNewApplicationConsumerSecret(tokenRequest);
}
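    // Illustrative usage sketch (the consumer key shown is hypothetical):
    //
    //   String newSecret = consumer.renewConsumerSecret("vYDoc9s7IgAFdkSyNDaswBX7ejoa");
    //
    // Only the consumer key is passed to the configured KeyManager, which is expected to
    // replace the existing secret and return the new one.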
/**
* The method to get APIs in any of the given LC status array
*
* @return Map<String, Object> API result set with pagination information
* @throws APIManagementException
*/
@Override
public Map<String, Object> getAllPaginatedAPIsByStatus(String tenantDomain,
int start, int end, final String[] apiStatus, boolean returnAPITags) throws APIManagementException {
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
boolean isMore = false;
String criteria = APIConstants.LCSTATE_SEARCH_TYPE_KEY;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
String paginationLimit = getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
                // Because the store jaggery pagination logic is 10 results per page we need to set the pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
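            // Worked example (illustrative): with API_STORE_APIS_PER_PAGE set to 30 and start=0,
            // maxPaginationLimit becomes 31; if the registry search later reports exactly 31
            // matches, 'isMore' is set to true and totalLength is trimmed back to 30 below.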
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
criteria = criteria + APIUtil.getORBasedSearchCriteria(apiStatus);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
if (apiStatus != null && apiStatus.length > 0) {
List<GovernanceArtifact> genericArtifacts = GovernanceUtils.findGovernanceArtifacts
(getSearchQuery(criteria), userRegistry, APIConstants.API_RXT_MEDIA_TYPE);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.size() == 0) {
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist so we cannot determine the total API count without incurring a
// performance hit
--totalLength; // Remove the additional 1 we added earlier when setting max pagination limit
}
int tempLength = 0;
for (GovernanceArtifact artifact : genericArtifacts) {
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//log and continue since we want to load the rest of the APIs.
log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME),
e);
}
if (api != null) {
if (returnAPITags) {
String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
Set<String> tags = new HashSet<String>();
org.wso2.carbon.registry.core.Tag[] tag = registry.getTags(artifactPath);
for (org.wso2.carbon.registry.core.Tag tag1 : tag) {
tags.add(tag1.getTagName());
}
api.addTags(tags);
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
tempLength++;
if (tempLength >= totalLength) {
break;
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis", apiVersionsSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all paginated APIs by status.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
/**
* The method to get APIs by given status to Store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Override
@Deprecated
public Map<String, Object> getAllPaginatedAPIsByStatus(String tenantDomain,
int start, int end, final String apiStatus, boolean returnAPITags) throws APIManagementException {
try {
if (tenantDomain != null) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
}finally {
endTenantFlow();
}
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (APIConstants.PROTOTYPED.equals(apiStatus)) {
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(apiStatus);
}});
} else {
if (!displayAPIsWithMultipleStatus) {
//Create the search attribute map
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(apiStatus);
}});
} else {
return getAllPaginatedAPIs(tenantDomain, start, end);
}
}
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
boolean isMore = false;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
String paginationLimit = getAPIManagerConfiguration()
.getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
                // Because the store jaggery pagination logic is 10 results per page we need to set the pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength=PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist so we cannot determine the total API count without incurring a
// performance hit
--totalLength; // Remove the additional 1 we added earlier when setting max pagination limit
}
int tempLength=0;
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting all paginated APIs by status.");
continue;
}
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//log and continue since we want to load the rest of the APIs.
log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME),
e);
}
if (api != null) {
if (returnAPITags) {
String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
Set<String> tags = new HashSet<String>();
org.wso2.carbon.registry.core.Tag[] tag = registry.getTags(artifactPath);
for (org.wso2.carbon.registry.core.Tag tag1 : tag) {
tags.add(tag1.getTagName());
}
api.addTags(tags);
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
tempLength++;
if (tempLength >= totalLength){
break;
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis",apiVersionsSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving APIs by status.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
/**
* Re-generates the access token.
* @param oldAccessToken Token to be revoked
* @param clientId Consumer Key for the Application
* @param clientSecret Consumer Secret for the Application
     * @param validityTime Desired validity time for the token, passed as a string
     * @param requestedScopes Scopes requested for the renewed token
     * @param jsonInput Additional parameters, if the authorization server needs any
* @return Renewed Access Token.
* @throws APIManagementException
*/
@Override
public AccessTokenInfo renewAccessToken(String oldAccessToken, String clientId, String clientSecret,
                                            String validityTime, String[] requestedScopes,
                                            String jsonInput) throws APIManagementException {
// Create Token Request with parameters provided from UI.
AccessTokenRequest tokenRequest = new AccessTokenRequest();
tokenRequest.setClientId(clientId);
tokenRequest.setClientSecret(clientSecret);
tokenRequest.setValidityPeriod(Long.parseLong(validityTime));
tokenRequest.setTokenToRevoke(oldAccessToken);
tokenRequest.setScope(requestedScopes);
try {
// Populating additional parameters.
tokenRequest = ApplicationUtils.populateTokenRequest(jsonInput, tokenRequest);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
JSONObject appLogObject = new JSONObject();
appLogObject.put("Re-Generated Keys for application with client Id", clientId);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyManager.getNewApplicationAccessToken(tokenRequest);
} catch (APIManagementException e) {
log.error("Error while re-generating AccessToken", e);
throw e;
}
}
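    // Illustrative usage sketch (all argument values are hypothetical):
    //
    //   AccessTokenInfo info = consumer.renewAccessToken(oldToken, clientId, clientSecret,
    //           "3600", new String[]{"default"}, "{}");
    //
    // Note that validityTime is parsed with Long.parseLong, so a non-numeric value results
    // in a NumberFormatException before the key manager is ever invoked.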
/**
* The method to get All PUBLISHED and DEPRECATED APIs, to Store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Deprecated
public Map<String,Object> getAllPaginatedAPIs(String tenantDomain,int start,int end) throws APIManagementException {
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
boolean noPublishedAPIs = false;
if (artifactManager != null) {
//Create the search attribute map for PUBLISHED APIs
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
noPublishedAPIs = true;
}
int publishedAPICount;
if (genericArtifacts != null) {
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting all paginated APIs.");
continue;
}
// adding the API provider can mark the latest API .
// String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
// key = api.getId().getProviderName() + ":" + api.getId().getApiName() + ":" + api.getId()
// .getVersion();
multiVersionedAPIs.add(api);
}
}
}
}
if (!displayMultipleVersions) {
publishedAPICount = latestPublishedAPIs.size();
} else {
publishedAPICount = multiVersionedAPIs.size();
}
if ((start + end) > publishedAPICount) {
if (publishedAPICount > 0) {
/*Starting to retrieve DEPRECATED APIs*/
start = 0;
/* publishedAPICount is always less than end*/
end = end - publishedAPICount;
} else {
start = start - totalLength;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
//Create the search attribute map for DEPRECATED APIs
Map<String, List<String>> listMapForDeprecatedAPIs = new HashMap<String, List<String>>();
listMapForDeprecatedAPIs.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.DEPRECATED);
}});
GenericArtifact[] genericArtifactsForDeprecatedAPIs = artifactManager.findGenericArtifacts(listMapForDeprecatedAPIs);
totalLength = totalLength + PaginationContext.getInstance().getLength();
if ((genericArtifactsForDeprecatedAPIs == null || genericArtifactsForDeprecatedAPIs.length == 0) && noPublishedAPIs) {
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
return result;
}
if (genericArtifactsForDeprecatedAPIs != null) {
for (GenericArtifact artifact : genericArtifactsForDeprecatedAPIs) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting deprecated APIs.");
continue;
}
// adding the API provider can mark the latest API .
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
}
}
if (!displayMultipleVersions) {
for (API api : latestPublishedAPIs.values()) {
apiSortedSet.add(api);
}
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis",apiVersionsSortedSet);
result.put("totalLength",totalLength);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all paginated APIs.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
}finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
@Override
public Set<API> getTopRatedAPIs(int limit) throws APIManagementException {
int returnLimit = 0;
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
try {
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage = "Artifact manager is null when retrieving top rated APIs.";
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
if (genericArtifacts == null || genericArtifacts.length == 0) {
return apiSortedSet;
}
for (GenericArtifact genericArtifact : genericArtifacts) {
String status = APIUtil.getLcStateFromArtifact(genericArtifact);
if (APIConstants.PUBLISHED.equals(status)) {
String artifactPath = genericArtifact.getPath();
float rating = registry.getAverageRating(artifactPath);
if (rating > APIConstants.TOP_TATE_MARGIN && (returnLimit < limit)) {
returnLimit++;
API api = APIUtil.getAPI(genericArtifact, registry);
if (api != null) {
apiSortedSet.add(api);
}
}
}
}
} catch (RegistryException e) {
handleException("Failed to get top rated API", e);
}
return apiSortedSet;
}
/**
* Get the recently added APIs set
*
     * @param limit The maximum number of recently added APIs to return
* @return Set<API>
* @throws APIManagementException
*/
@Override
public Set<API> getRecentlyAddedAPIs(int limit, String tenantDomain)
throws APIManagementException {
SortedSet<API> recentlyAddedAPIs = new TreeSet<API>(new APINameComparator());
SortedSet<API> recentlyAddedAPIsWithMultipleVersions = new TreeSet<API>(new APIVersionComparator());
Registry userRegistry;
APIManagerConfiguration config = getAPIManagerConfiguration();
boolean isRecentlyAddedAPICacheEnabled =
Boolean.parseBoolean(config.getFirstProperty(APIConstants.API_STORE_RECENTLY_ADDED_API_CACHE_ENABLE));
PrivilegedCarbonContext.startTenantFlow();
boolean isTenantFlowStarted ;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
isTenantFlowStarted = true;
} else {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
isTenantFlowStarted = true;
}
try {
boolean isTenantMode = (tenantDomain != null);
if ((isTenantMode && this.tenantDomain == null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant based store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
isTenantFlowStarted = true;
userRegistry = getGovernanceUserRegistry(tenantId);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
isTenantFlowStarted = true;
}
if (isRecentlyAddedAPICacheEnabled) {
boolean isStatusChanged = false;
Set<API> recentlyAddedAPI = (Set<API>) Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME).get(username + COLON_CHAR + tenantDomain);
if (recentlyAddedAPI != null) {
for (API api : recentlyAddedAPI) {
try {
if (!APIConstants.PUBLISHED.equalsIgnoreCase(userRegistry.get(APIUtil.getAPIPath(api.getId())).getProperty(APIConstants.API_STATUS))) {
isStatusChanged = true;
break;
}
} catch (Exception ex) {
log.error("Error while checking API status for APP " + api.getId().getApiName() + '-' +
api.getId().getVersion(), ex);
}
}
if (!isStatusChanged) {
return recentlyAddedAPI;
}
}
}
PaginationContext.init(0, limit, APIConstants.REGISTRY_ARTIFACT_SEARCH_DESC_ORDER,
APIConstants.CREATED_DATE, Integer.MAX_VALUE);
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
listMap.put(APIConstants.STORE_VIEW_ROLES, getUserRoleList());
String searchCriteria = APIConstants.LCSTATE_SEARCH_KEY + "= (" + APIConstants.PUBLISHED + ")";
//Find UUID
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGovernanceArtifacts(getSearchQuery(searchCriteria));
SortedSet<API> allAPIs = new TreeSet<API>(new APINameComparator());
for (GenericArtifact artifact : genericArtifacts) {
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//just log and continue since we want to go through the other APIs as well.
log.error("Error loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME), e);
}
if (api != null) {
allAPIs.add(api);
}
}
if (!APIUtil.isAllowDisplayMultipleVersions()) {
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
Comparator<API> versionComparator = new APIVersionComparator();
String key;
for (API api : allAPIs) {
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same
// name, make sure this one has a higher version
// number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
}
recentlyAddedAPIs.addAll(latestPublishedAPIs.values());
if (isRecentlyAddedAPICacheEnabled) {
Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME)
.put(username + COLON_CHAR + tenantDomain, allAPIs);
}
return recentlyAddedAPIs;
} else {
recentlyAddedAPIsWithMultipleVersions.addAll(allAPIs);
if (isRecentlyAddedAPICacheEnabled) {
Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME)
.put(username + COLON_CHAR + tenantDomain, allAPIs);
}
return recentlyAddedAPIsWithMultipleVersions;
}
} else {
String errorMessage = "Artifact manager is null when retrieving recently added APIs for tenant domain "
+ tenantDomain;
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
if (isTenantFlowStarted) {
endTenantFlow();
}
}
return recentlyAddedAPIs;
}
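    // Illustrative note: when the recently-added-API cache is enabled, entries are keyed by
    // "<username>:<tenantDomain>" (for example "admin:carbon.super" as a hypothetical key),
    // so each user and tenant combination keeps its own cached view of the store.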
@Override
public Set<Tag> getAllTags(String requestedTenantDomain) throws APIManagementException {
this.isTenantModeStoreView = (requestedTenantDomain != null);
if(requestedTenantDomain != null){
this.requestedTenant = requestedTenantDomain;
}
/* We keep track of the lastUpdatedTime of the TagCache to determine its freshness.
*/
long lastUpdatedTimeAtStart = lastUpdatedTime;
long currentTimeAtStart = System.currentTimeMillis();
if(isTagCacheEnabled && ( (currentTimeAtStart- lastUpdatedTimeAtStart) < tagCacheValidityTime)){
if(tagSet != null){
return tagSet;
}
}
TreeSet<Tag> tempTagSet = new TreeSet<Tag>(new Comparator<Tag>() {
@Override
public int compare(Tag o1, Tag o2) {
return o1.getName().compareTo(o2.getName());
}
});
Registry userRegistry = null;
boolean isTenantFlowStarted = false;
String tagsQueryPath = null;
try {
tagsQueryPath = RegistryConstants.QUERIES_COLLECTION_PATH + "/tag-summary";
Map<String, String> params = new HashMap<String, String>();
params.put(RegistryConstants.RESULT_TYPE_PROPERTY_NAME, RegistryConstants.TAG_SUMMARY_RESULT_TYPE);
//as a tenant, I'm browsing my own Store or I'm browsing a Store of another tenant..
if ((this.isTenantModeStoreView && this.tenantDomain==null) || (this.isTenantModeStoreView && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant based store anonymous mode
int tenantId = getTenantId(this.requestedTenant);
userRegistry = ServiceReferenceHolder.getInstance().getRegistryService().
getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
} else {
userRegistry = registry;
}
Map<String, Tag> tagsData = new HashMap<String, Tag>();
try {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(((UserRegistry)userRegistry).getUserName());
if (requestedTenant != null ) {
isTenantFlowStarted = startTenantFlowForTenantDomain(requestedTenant);
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(((UserRegistry)userRegistry).getUserName());
}
Map <String, List<String>> criteriaPublished = new HashMap<String, List<String>>();
criteriaPublished.put(APIConstants.LCSTATE_SEARCH_KEY, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
//rxt api media type
List<TermData> termsPublished = GovernanceUtils
.getTermDataList(criteriaPublished, APIConstants.API_OVERVIEW_TAG,
APIConstants.API_RXT_MEDIA_TYPE, true);
if(termsPublished != null){
for(TermData data : termsPublished){
tempTagSet.add(new Tag(data.getTerm(), (int)data.getFrequency()));
}
}
Map<String, List<String>> criteriaPrototyped = new HashMap<String, List<String>>();
criteriaPrototyped.put(APIConstants.LCSTATE_SEARCH_KEY, new ArrayList<String>() {{
add(APIConstants.PROTOTYPED);
}});
//rxt api media type
List<TermData> termsPrototyped = GovernanceUtils
.getTermDataList(criteriaPrototyped, APIConstants.API_OVERVIEW_TAG,
APIConstants.API_RXT_MEDIA_TYPE, true);
if(termsPrototyped != null){
for(TermData data : termsPrototyped){
tempTagSet.add(new Tag(data.getTerm(), (int)data.getFrequency()));
}
}
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
synchronized (tagCacheMutex) {
lastUpdatedTime = System.currentTimeMillis();
this.tagSet = tempTagSet;
}
} catch (RegistryException e) {
try {
                //Before a tenant logs in to the store or publisher at least once,
                //a registry exception is thrown when the tenant store is accessed in anonymous mode.
                //This fix checks whether the query resource is available in the registry. If it is not,
                //a warning is logged.
if (userRegistry != null && !userRegistry.resourceExists(tagsQueryPath)) {
log.warn("Failed to retrieve tags query resource at " + tagsQueryPath);
return tagSet == null ? Collections.EMPTY_SET : tagSet;
}
} catch (RegistryException e1) {
// Even if we should ignore this exception, we are logging this as a warn log.
// The reason is that, this error happens when we try to add some additional logs in an error
// scenario and it does not affect the execution path.
log.warn("Unable to execute the resource exist method for tags query resource path : " + tagsQueryPath,
e1);
}
handleException("Failed to get all the tags", e);
} catch (UserStoreException e) {
handleException("Failed to get all the tags", e);
}
return tagSet;
}
@Override
public Set<Tag> getTagsWithAttributes(String tenantDomain) throws APIManagementException {
// Fetch the all the tags first.
Set<Tag> tags = getAllTags(tenantDomain);
// For each and every tag get additional attributes from the registry.
String descriptionPathPattern = APIConstants.TAGS_INFO_ROOT_LOCATION + "/%s/description.txt";
String thumbnailPathPattern = APIConstants.TAGS_INFO_ROOT_LOCATION + "/%s/thumbnail.png";
//if the tenantDomain is not specified super tenant domain is used
if (StringUtils.isBlank(tenantDomain)) {
try {
tenantDomain = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getSuperTenantDomain();
} catch (org.wso2.carbon.user.core.UserStoreException e) {
handleException("Cannot get super tenant domain name", e);
}
}
//get the registry instance related to the tenant domain
UserRegistry govRegistry = null;
try {
int tenantId = getTenantId(tenantDomain);
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
govRegistry = registryService.getGovernanceSystemRegistry(tenantId);
} catch (UserStoreException e) {
handleException("Cannot get tenant id for tenant domain name:" + tenantDomain, e);
} catch (RegistryException e) {
handleException("Cannot get registry for tenant domain name:" + tenantDomain, e);
}
if (govRegistry != null) {
for (Tag tag : tags) {
// Get the description.
Resource descriptionResource = null;
String descriptionPath = String.format(descriptionPathPattern, tag.getName());
try {
if (govRegistry.resourceExists(descriptionPath)) {
descriptionResource = govRegistry.get(descriptionPath);
}
} catch (RegistryException e) {
//warn and proceed to the next tag
log.warn(String.format("Error while querying the existence of the description for the tag '%s'",
tag.getName()), e);
}
// The resource is assumed to be a byte array since its the content
// of a text file.
if (descriptionResource != null) {
try {
String description = new String((byte[]) descriptionResource.getContent(),
Charset.defaultCharset());
tag.setDescription(description);
                    } catch (ClassCastException e) {
                        //Log a warning and proceed to load the rest of the resources/tags
                        log.warn(String.format("Cannot cast content of %s to byte[]", descriptionPath), e);
                    } catch (RegistryException e) {
                        //Log a warning and proceed to load the rest of the resources/tags
                        log.warn(String.format("Cannot read content of %s", descriptionPath), e);
                    }
}
// Checks whether the thumbnail exists.
String thumbnailPath = String.format(thumbnailPathPattern, tag.getName());
try {
boolean isThumbnailExists = govRegistry.resourceExists(thumbnailPath);
tag.setThumbnailExists(isThumbnailExists);
if (isThumbnailExists) {
tag.setThumbnailUrl(APIUtil.getRegistryResourcePathForUI(
APIConstants.RegistryResourceTypesForUI.TAG_THUMBNAIL, tenantDomain, thumbnailPath));
}
} catch (RegistryException e) {
                    //Warn and then proceed to load the rest of the tags
log.warn(String.format("Error while querying the existence of %s", thumbnailPath), e);
}
}
}
return tags;
}
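    /**
     * Persists the rating given by the specified user for the specified API.
     *
     * @param apiId  identifier of the API being rated
     * @param rating rating value
     * @param user   user who rates the API
     * @throws APIManagementException if the rating cannot be persisted
     */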
@Override
public void rateAPI(APIIdentifier apiId, APIRating rating,
String user) throws APIManagementException {
apiMgtDAO.addRating(apiId, rating.getRating(), user);
}
@Override
public void removeAPIRating(APIIdentifier apiId, String user) throws APIManagementException {
apiMgtDAO.removeAPIRating(apiId, user);
}
@Override
public int getUserRating(APIIdentifier apiId, String user) throws APIManagementException {
return apiMgtDAO.getUserRating(apiId, user);
}
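    /**
     * Returns the published APIs of the given provider, up to the given limit. Depending on the
     * configuration, either only the latest version of each API or all versions are returned,
     * and deprecated APIs may be included as well.
     *
     * @param providerId provider whose APIs should be listed
     * @param limit      maximum number of APIs to return; -1 returns all
     * @return the set of matching APIs, or null if the registry lookup fails
     * @throws APIManagementException if the APIs cannot be retrieved
     */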
@Override
public Set<API> getPublishedAPIsByProvider(String providerId, int limit)
throws APIManagementException {
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
try {
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage =
"Artifact manager is null when retrieving published APIs by provider ID " + providerId;
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
if (associations.length < limit || limit == -1) {
limit = associations.length;
}
for (int i = 0; i < limit; i++) {
Association association = associations[i];
String apiPath = association.getDestinationPath();
Resource resource = registry.get(apiPath);
String apiArtifactId = resource.getUUID();
if (apiArtifactId != null) {
GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
// check the API status
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
                        //Check whether the api-manager.xml config entry <DisplayAllAPIs> is set to false
if (!displayAPIsWithMultipleStatus) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
String key;
                        //Check whether the configuration allows showing multiple versions of an API
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
} else {
                    throw new GovernanceException("artifact id is null for " + apiPath);
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
return apiSortedSet;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
return apiVersionsSortedSet;
}
} catch (RegistryException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
}
return null;
}
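    /**
     * Returns the published APIs of the given provider that are visible to the logged-in user,
     * optionally filtered by API owner and business owner, up to the given limit.
     *
     * @param providerId     provider whose APIs should be listed
     * @param loggedUsername user viewing the Store; empty for anonymous access
     * @param limit          maximum number of APIs to return
     * @param apiOwner       API owner to filter by, or null/empty for no owner filtering
     * @param apiBizOwner    business owner to filter by, or null/empty for no business-owner filtering
     * @return the set of matching APIs, or null if an error occurs
     * @throws APIManagementException if the APIs cannot be retrieved
     */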
@Override
public Set<API> getPublishedAPIsByProvider(String providerId, String loggedUsername, int limit, String apiOwner,
String apiBizOwner) throws APIManagementException {
try {
Boolean allowMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean showAllAPIs = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
String providerDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerId));
int tenantId = getTenantId(providerDomain);
final Registry registry = ServiceReferenceHolder.getInstance().
getRegistryService().getGovernanceSystemRegistry(tenantId);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage =
"Artifact manager is null when retrieving all published APIs by provider ID " + providerId;
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
int publishedAPICount = 0;
Map<String, API> apiCollection = new HashMap<String, API>();
if(apiBizOwner != null && !apiBizOwner.isEmpty()){
try {
final String bizOwner = apiBizOwner;
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_BUSS_OWNER, new ArrayList<String>() {{
add(bizOwner);
}});
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username);
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
if(genericArtifacts != null && genericArtifacts.length > 0){
for(GenericArtifact artifact : genericArtifacts){
if (publishedAPICount >= limit) {
break;
}
if(isCandidateAPI(artifact.getPath(), loggedUsername, artifactManager, tenantId, showAllAPIs,
allowMultipleVersions, apiOwner, providerId, registry, apiCollection)){
publishedAPICount += 1;
}
}
}
} catch (GovernanceException e) {
log.error("Error while finding APIs by business owner " + apiBizOwner, e);
return null;
}
            } else {
String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
for (Association association : associations) {
if (publishedAPICount >= limit) {
break;
}
String apiPath = association.getDestinationPath();
if(isCandidateAPI(apiPath, loggedUsername, artifactManager, tenantId, showAllAPIs,
allowMultipleVersions, apiOwner, providerId, registry, apiCollection)){
publishedAPICount += 1;
}
}
}
return new HashSet<API>(apiCollection.values());
} catch (RegistryException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
} catch (org.wso2.carbon.user.core.UserStoreException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
} catch (UserStoreException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
}
}
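    /**
     * Checks whether the API at the given registry path should be included in the provider's
     * published API list for the logged-in user, applying authorization, status, visibility and
     * owner checks. Qualifying APIs are added to the supplied apiCollection map.
     *
     * @return true if the API was added to apiCollection, false otherwise
     */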
private boolean isCandidateAPI(String apiPath, String loggedUsername, GenericArtifactManager artifactManager,
int tenantId, boolean showAllAPIs, boolean allowMultipleVersions,
String apiOwner, String providerId, Registry registry, Map<String, API> apiCollection)
throws UserStoreException, RegistryException, APIManagementException {
AuthorizationManager manager = ServiceReferenceHolder.getInstance().getRealmService().
getTenantUserRealm(tenantId).getAuthorizationManager();
Comparator<API> versionComparator = new APIVersionComparator();
Resource resource;
String path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) +
apiPath);
boolean checkAuthorized;
String userNameWithoutDomain = loggedUsername;
if (!loggedUsername.isEmpty() && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(super.tenantDomain)) {
String[] nameParts = loggedUsername.split("@");
userNameWithoutDomain = nameParts[0];
}
int loggedInUserTenantDomain = -1;
if(!StringUtils.isEmpty(loggedUsername)) {
loggedInUserTenantDomain = APIUtil.getTenantId(loggedUsername);
}
if (loggedUsername.isEmpty()) {
// Anonymous user is viewing.
checkAuthorized = manager.isRoleAuthorized(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET);
} else if (tenantId != loggedInUserTenantDomain) {
//Cross tenant scenario
providerId = APIUtil.replaceEmailDomainBack(providerId);
String[] nameParts = providerId.split("@");
String provideNameWithoutDomain = nameParts[0];
checkAuthorized = manager.isUserAuthorized(provideNameWithoutDomain, path, ActionConstants.GET);
} else {
// Some user is logged in also user and api provider tenant domain are same.
checkAuthorized = manager.isUserAuthorized(userNameWithoutDomain, path, ActionConstants.GET);
}
String apiArtifactId = null;
if (checkAuthorized) {
resource = registry.get(apiPath);
apiArtifactId = resource.getUUID();
}
if (apiArtifactId != null) {
GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
// check the API status
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
            //Check whether the api-manager.xml config entry <DisplayAllAPIs> is set to false
if (!showAllAPIs) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
String apiVisibility = api.getVisibility();
if(!StringUtils.isEmpty(apiVisibility) && !APIConstants.API_GLOBAL_VISIBILITY.equalsIgnoreCase(apiVisibility)) {
String providerDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerId));
String loginUserDomain = MultitenantUtils.getTenantDomain(loggedUsername);
if(!StringUtils.isEmpty(providerDomain) && !StringUtils.isEmpty(loginUserDomain)
&& !providerDomain.equals(loginUserDomain)){
return false;
}
}
// apiOwner is the value coming from front end and compared against the API instance
if (apiOwner != null && !apiOwner.isEmpty()) {
if (APIUtil.replaceEmailDomainBack(providerId).equals(APIUtil.replaceEmailDomainBack(apiOwner)) &&
api.getApiOwner() != null && !api.getApiOwner().isEmpty() &&
!APIUtil.replaceEmailDomainBack(apiOwner)
.equals(APIUtil.replaceEmailDomainBack(api.getApiOwner()))) {
return false; // reject remote APIs when local admin user's API selected
} else if (!APIUtil.replaceEmailDomainBack(providerId).equals(APIUtil.replaceEmailDomainBack(apiOwner)) &&
!APIUtil.replaceEmailDomainBack(apiOwner)
.equals(APIUtil.replaceEmailDomainBack(api.getApiOwner()))) {
return false; // reject local admin's APIs when remote API selected
}
}
String key;
            //Check whether the configuration allows showing multiple versions of an API
if (!allowMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = apiCollection.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
apiCollection.put(key, api);
return true;
}
} else {
// We haven't seen this API before
apiCollection.put(key, api);
return true;
}
} else { //If allow showing multiple versions of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName() + COLON_CHAR + api.getId()
.getVersion();
//we're not really interested in the key, so generate one for the sake of adding this element to
//the map.
key = key + '_' + apiCollection.size();
apiCollection.put(key, api);
return true;
}
}
}
return false;
}
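    /**
     * Performs a paginated API search within the given tenant domain. Document and sub-context
     * searches are delegated to APIUtil, while other search types use the registry-based
     * paginated search.
     *
     * @param searchTerm            term to search for
     * @param searchType            type of search (for example doc, subcontext, name or provider)
     * @param requestedTenantDomain tenant domain to search in; null means the current tenant
     * @param start                 starting index of the result set
     * @param end                   ending index of the result set
     * @param isLazyLoad            whether to load each API with a limited set of attributes
     * @return a result map containing the matching APIs and pagination details
     * @throws APIManagementException if the search fails
     */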
@Override
public Map<String,Object> searchPaginatedAPIs(String searchTerm, String searchType, String requestedTenantDomain,int start,int end, boolean isLazyLoad)
throws APIManagementException {
Map<String,Object> result = new HashMap<String,Object>();
boolean isTenantFlowStarted = false;
try {
boolean isTenantMode=(requestedTenantDomain != null);
if (isTenantMode && !org.wso2.carbon.base.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(requestedTenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(requestedTenantDomain, true);
} else {
requestedTenantDomain = org.wso2.carbon.base.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(requestedTenantDomain, true);
}
Registry userRegistry;
int tenantIDLocal = 0;
String userNameLocal = this.username;
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant store anonymous mode
tenantIDLocal = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain);
userRegistry = ServiceReferenceHolder.getInstance().
getRegistryService().getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantIDLocal);
userNameLocal = CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME;
} else {
userRegistry = this.registry;
tenantIDLocal = tenantId;
}
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(userNameLocal);
if (APIConstants.DOCUMENTATION_SEARCH_TYPE_PREFIX.equalsIgnoreCase(searchType)) {
Map<Documentation, API> apiDocMap =
APIUtil.searchAPIsByDoc(userRegistry, tenantIDLocal, userNameLocal, searchTerm,
APIConstants.STORE_CLIENT);
result.put("apis", apiDocMap);
/*Pagination for Document search results is not supported yet, hence length is sent as end-start*/
if (apiDocMap.isEmpty()) {
result.put("length", 0);
} else {
result.put("length", end-start);
}
}
else if ("subcontext".equalsIgnoreCase(searchType)) {
                result = APIUtil.searchAPIsByURLPattern(userRegistry, searchTerm, start, end);
            } else {
                result = searchPaginatedAPIs(userRegistry, searchTerm, searchType, start, end, isLazyLoad);
}
} catch (Exception e) {
handleException("Failed to Search APIs", e);
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return result;
}
    /**
     * Paginated API search based on Solr indexing.
     *
     * @param registry        registry to search in
     * @param searchTerm      term to search for
     * @param searchType      attribute to search by (name, provider, version, context, description or tag)
     * @param start           starting index of the result set
     * @param end             ending index of the result set
     * @param limitAttributes whether to load each API with a limited set of attributes
     * @return a result map containing the matching APIs ("apis"), the total length ("length") and
     *         whether more results may exist ("isMore")
     * @throws APIManagementException if the search fails
     */
public Map<String,Object> searchPaginatedAPIs(Registry registry, String searchTerm, String searchType,int start,int end, boolean limitAttributes) throws APIManagementException {
SortedSet<API> apiSet = new TreeSet<API>(new APINameComparator());
List<API> apiList = new ArrayList<API>();
searchTerm = searchTerm.trim();
        Map<String, Object> result = new HashMap<String, Object>();
        int totalLength = 0;
        boolean isMore = false;
        String criteria = APIConstants.API_OVERVIEW_NAME;
try {
String paginationLimit = getAPIManagerConfiguration()
.getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
// Because the store jaggery pagination logic is 10 results per a page we need to set pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
if (artifactManager != null) {
if (APIConstants.API_PROVIDER.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_PROVIDER;
searchTerm = searchTerm.replaceAll("@", "-AT-");
} else if (APIConstants.API_VERSION_LABEL.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_VERSION;
} else if (APIConstants.API_CONTEXT.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_CONTEXT;
} else if (APIConstants.API_DESCRIPTION.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_DESCRIPTION;
} else if (APIConstants.API_TAG.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_TAG;
}
//Create the search attribute map for PUBLISHED APIs
final String searchValue = searchTerm;
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(criteria, new ArrayList<String>() {{
add(searchValue);
}});
boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
                //Take only the published APIs from the search when there is no need to return APIs with
                //multiple statuses, because pagination breaks if we do another filtering pass on the API status.
if (!displayAPIsWithMultipleStatus) {
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
}
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
boolean isFound = true;
if (genericArtifacts == null || genericArtifacts.length == 0) {
if (APIConstants.API_OVERVIEW_PROVIDER.equals(criteria)) {
genericArtifacts = searchAPIsByOwner(artifactManager, searchValue);
if (genericArtifacts == null || genericArtifacts.length == 0) {
isFound = false;
}
}
else {
isFound = false;
}
}
if (!isFound) {
result.put("apis", apiSet);
result.put("length", 0);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist, cannot determine total API count without incurring perf hit
--totalLength; // Remove the additional 1 added earlier when setting max pagination limit
}
int tempLength =0;
for (GenericArtifact artifact : genericArtifacts) {
String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
if (APIUtil.isAllowDisplayAPIsWithMultipleStatus()) {
if (APIConstants.PROTOTYPED.equals(status) || APIConstants.PUBLISHED.equals(status)
|| APIConstants.DEPRECATED.equals(status)) {
API resultAPI;
if (limitAttributes) {
resultAPI = APIUtil.getAPI(artifact);
} else {
resultAPI = APIUtil.getAPI(artifact, registry);
}
if (resultAPI != null) {
apiList.add(resultAPI);
}
}
} else {
if (APIConstants.PROTOTYPED.equals(status) || APIConstants.PUBLISHED.equals(status)) {
API resultAPI;
if (limitAttributes) {
resultAPI = APIUtil.getAPI(artifact);
} else {
resultAPI = APIUtil.getAPI(artifact, registry);
}
if (resultAPI != null) {
apiList.add(resultAPI);
}
}
}
                    // Ensure the number of APIs returned matches the length; there could be an additional API
                    // returned due to incrementing the pagination limit when reading from the registry
tempLength++;
if (tempLength >= totalLength){
break;
}
}
apiSet.addAll(apiList);
}
} catch (RegistryException e) {
handleException("Failed to search APIs with type", e);
}
result.put("apis",apiSet);
result.put("length",totalLength);
result.put("isMore", isMore);
return result;
}
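    /**
     * Finds generic API artifacts whose owner attribute matches the given search value.
     * Used as a fallback when a provider search returns no results.
     *
     * @param artifactManager artifact manager used to query the registry
     * @param searchValue     owner value to search for
     * @return the matching generic artifacts
     * @throws GovernanceException if the artifact lookup fails
     */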
private GenericArtifact[] searchAPIsByOwner(GenericArtifactManager artifactManager, final String searchValue) throws GovernanceException {
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_OWNER, new ArrayList<String>() {
{
add(searchValue);
}
});
return artifactManager.findGenericArtifacts(listMap);
}
    /**
     * Deletes the application key mapping and application registration entries for the given application.
     *
     * @param applicationName application name
     * @param tokenType       token type
     * @param groupId         group id of the application
     * @param userName        user name
     * @throws APIManagementException if the cleanup fails
     */
@Override
public void cleanUpApplicationRegistration(String applicationName ,String tokenType ,String groupId ,String
userName) throws APIManagementException{
Application application = apiMgtDAO.getApplicationByName(applicationName, userName, groupId);
String applicationId = String.valueOf(application.getId());
cleanUpApplicationRegistrationByApplicationId(applicationId, tokenType);
}
/*
* @see super.cleanUpApplicationRegistrationByApplicationId
* */
@Override
public void cleanUpApplicationRegistrationByApplicationId(String applicationId, String tokenType) throws APIManagementException {
apiMgtDAO.deleteApplicationRegistration(applicationId , tokenType);
apiMgtDAO.deleteApplicationKeyMappingByApplicationIdAndType(applicationId, tokenType);
apiMgtDAO.getConsumerkeyByApplicationIdAndKeyType(applicationId, tokenType);
}
    /**
     * Maps an existing OAuth client to an API Manager application.
     *
     * @param jsonString      string containing the OAuth app details
     * @param userName        user name of the logged-in user
     * @param clientId        consumer key of the OAuth application
     * @param applicationName name of the APIM application
     * @param keyType         key type of the application
     * @param tokenType       application token type; this can be either default or jwt
     * @return a map of key details (consumer key/secret, access token and app details)
     * @throws APIManagementException if the mapping fails
     */
@Override
public Map<String, Object> mapExistingOAuthClient(String jsonString, String userName, String clientId,
String applicationName, String keyType, String tokenType)
throws APIManagementException {
String callBackURL = null;
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, clientId, callBackURL,
"default",
jsonString, tokenType);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
// Checking if clientId is mapped with another application.
if (apiMgtDAO.isMappingExistsforConsumerKey(clientId)) {
String message = "Consumer Key " + clientId + " is used for another Application.";
log.error(message);
throw new APIManagementException(message);
}
log.debug("Client ID not mapped previously with another application.");
//createApplication on oAuthorization server.
OAuthApplicationInfo oAuthApplication = keyManager.mapOAuthApplication(oauthAppRequest);
//Do application mapping with consumerKey.
apiMgtDAO.createApplicationKeyTypeMappingForManualClients(keyType, applicationName, userName, clientId);
AccessTokenInfo tokenInfo;
if (oAuthApplication.getJsonString().contains(APIConstants.GRANT_TYPE_CLIENT_CREDENTIALS)) {
AccessTokenRequest tokenRequest = ApplicationUtils.createAccessTokenRequest(oAuthApplication, null);
tokenInfo = keyManager.getNewApplicationAccessToken(tokenRequest);
} else {
tokenInfo = new AccessTokenInfo();
tokenInfo.setAccessToken("");
tokenInfo.setValidityPeriod(0L);
String[] noScopes = new String[] {"N/A"};
tokenInfo.setScope(noScopes);
oAuthApplication.addParameter("tokenScope", Arrays.toString(noScopes));
}
Map<String, Object> keyDetails = new HashMap<String, Object>();
if (tokenInfo != null) {
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oAuthApplication.getClientId());
keyDetails.put("consumerSecret", oAuthApplication.getParameter("client_secret"));
keyDetails.put("appDetails", oAuthApplication.getJsonString());
return keyDetails;
}
    /**
     * Returns the SubscribedAPI object related to the given subscription id.
     *
     * @param subscriptionId subscription id
     * @return the matching SubscribedAPI object
     * @throws APIManagementException if the subscription cannot be retrieved
     */
@Override
public SubscribedAPI getSubscriptionById(int subscriptionId) throws APIManagementException {
return apiMgtDAO.getSubscriptionById(subscriptionId);
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber) throws APIManagementException {
return getSubscribedAPIs(subscriber, null);
}
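    /**
     * Returns the APIs subscribed to by the given subscriber, setting the display name of each
     * subscription's tier from the tenant's tier configuration.
     *
     * @param subscriber subscriber whose subscriptions are returned
     * @param groupingId group id of the subscriber's applications
     * @return the set of subscribed APIs
     * @throws APIManagementException if the subscriptions cannot be retrieved
     */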
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String groupingId) throws APIManagementException {
Set<SubscribedAPI> originalSubscribedAPIs;
Set<SubscribedAPI> subscribedAPIs = new HashSet<SubscribedAPI>();
try {
originalSubscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, groupingId);
if (originalSubscribedAPIs != null && !originalSubscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : originalSubscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi.getTier().getName());
subscribedAPIs.add(subscribedApi);
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName(), e);
}
return subscribedAPIs;
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String applicationName, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, applicationName, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationName, e);
}
return subscribedAPIs;
}
/*
*@see super.getSubscribedAPIsByApplicationId
*
*/
@Override
public Set<SubscribedAPI> getSubscribedAPIsByApplicationId(Subscriber subscriber, int applicationId, String groupingId) throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getSubscribedAPIsByApplicationId(subscriber, applicationId, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
Set<APIKey> keys = getApplicationKeys(subscribedApi.getApplication().getId());
for (APIKey key : keys) {
subscribedApi.getApplication().addKey(key);
}
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationId, e);
}
return subscribedAPIs;
}
@Override
public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, String applicationName,
int startSubIndex, int endSubIndex, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getPaginatedSubscribedAPIs(subscriber, applicationName, startSubIndex,
endSubIndex, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
// subscribedAPIs.add(subscribedApi);
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationName, e);
}
return subscribedAPIs;
}
@Override
public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, int applicationId, int startSubIndex,
int endSubIndex, String groupingId) throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getPaginatedSubscribedAPIs(subscriber, applicationId, startSubIndex,
endSubIndex, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
// subscribedAPIs.add(subscribedApi);
Set<APIKey> keys = getApplicationKeys(subscribedApi.getApplication().getId());
for (APIKey key : keys) {
subscribedApi.getApplication().addKey(key);
}
}
}
} catch (APIManagementException e) {
String msg = "Failed to get APIs of " + subscriber.getName() + " under application " + applicationId;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
return subscribedAPIs;
}
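    /**
     * Returns the number of subscriptions of the given subscriber under the given application.
     *
     * @param subscriber      subscriber whose subscriptions are counted
     * @param applicationName name of the application
     * @param groupingId      group id of the application
     * @return the subscription count
     * @throws APIManagementException if the count cannot be retrieved
     */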
public Integer getSubscriptionCount(Subscriber subscriber,String applicationName,String groupingId)
throws APIManagementException {
return apiMgtDAO.getSubscriptionCount(subscriber,applicationName,groupingId);
}
public Integer getSubscriptionCountByApplicationId(Subscriber subscriber, int applicationId, String groupingId)
throws APIManagementException {
return apiMgtDAO.getSubscriptionCountByApplicationId(subscriber, applicationId, groupingId);
}
@Override
public Set<APIIdentifier> getAPIByConsumerKey(String accessToken) throws APIManagementException {
try {
return apiMgtDAO.getAPIByConsumerKey(accessToken);
} catch (APIManagementException e) {
handleException("Error while obtaining API from API key", e);
}
return null;
}
@Override
public boolean isSubscribed(APIIdentifier apiIdentifier, String userId)
throws APIManagementException {
boolean isSubscribed;
try {
isSubscribed = apiMgtDAO.isSubscribed(apiIdentifier, userId);
} catch (APIManagementException e) {
String msg = "Failed to check if user(" + userId + ") has subscribed to " + apiIdentifier;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
return isSubscribed;
}
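    /**
     * Adds a subscription for the given API and application on behalf of the given user.
     * The subscription is stored in the ON_HOLD state and the subscription-creation workflow is
     * executed; if the workflow execution fails the subscription entry is rolled back, and if the
     * workflow response indicates rejection the returned status is REJECTED.
     *
     * @param identifier    identifier of the API being subscribed to
     * @param userId        user adding the subscription
     * @param applicationId id of the application the subscription is added to
     * @return a SubscriptionResponse containing the subscription status, UUID and workflow response
     * @throws APIManagementException if the API is not in the PUBLISHED state or the workflow cannot be executed
     */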
@Override
public SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
API api = getAPI(identifier);
WorkflowResponse workflowResponse = null;
int subscriptionId;
String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(userId);
if (APIConstants.PUBLISHED.equals(api.getStatus())) {
subscriptionId = apiMgtDAO.addSubscription(identifier, api.getContext(), applicationId,
APIConstants.SubscriptionStatus.ON_HOLD, tenantAwareUsername);
boolean isTenantFlowStarted = false;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
String applicationName = apiMgtDAO.getApplicationNameFromId(applicationId);
try {
WorkflowExecutor addSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
SubscriptionWorkflowDTO workflowDTO = new SubscriptionWorkflowDTO();
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
workflowDTO.setExternalWorkflowReference(addSubscriptionWFExecutor.generateUUID());
workflowDTO.setWorkflowReference(String.valueOf(subscriptionId));
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
workflowDTO.setCallbackUrl(addSubscriptionWFExecutor.getCallbackURL());
workflowDTO.setApiName(identifier.getApiName());
workflowDTO.setApiContext(api.getContext());
workflowDTO.setApiVersion(identifier.getVersion());
workflowDTO.setApiProvider(identifier.getProviderName());
workflowDTO.setTierName(identifier.getTier());
workflowDTO.setApplicationName(apiMgtDAO.getApplicationNameFromId(applicationId));
workflowDTO.setApplicationId(applicationId);
workflowDTO.setSubscriber(userId);
workflowResponse = addSubscriptionWFExecutor.execute(workflowDTO);
} catch (WorkflowException e) {
//If the workflow execution fails, roll back transaction by removing the subscription entry.
apiMgtDAO.removeSubscriptionById(subscriptionId);
log.error("Could not execute Workflow", e);
throw new APIManagementException("Could not execute Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (APIUtil.isAPIGatewayKeyCacheEnabled()) {
invalidateCachedKeys(applicationId);
}
//to handle on-the-fly subscription rejection (and removal of subscription entry from the database)
//the response should have {"Status":"REJECTED"} in the json payload for this to work.
boolean subscriptionRejected = false;
String subscriptionStatus = null;
String subscriptionUUID = "";
if (workflowResponse != null && workflowResponse.getJSONPayload() != null
&& !workflowResponse.getJSONPayload().isEmpty()) {
try {
JSONObject wfResponseJson = (JSONObject) new JSONParser().parse(workflowResponse.getJSONPayload());
if (APIConstants.SubscriptionStatus.REJECTED.equals(wfResponseJson.get("Status"))) {
subscriptionRejected = true;
subscriptionStatus = APIConstants.SubscriptionStatus.REJECTED;
}
} catch (ParseException e) {
log.error('\'' + workflowResponse.getJSONPayload() + "' is not a valid JSON.", e);
}
}
if (!subscriptionRejected) {
SubscribedAPI addedSubscription = getSubscriptionById(subscriptionId);
subscriptionStatus = addedSubscription.getSubStatus();
subscriptionUUID = addedSubscription.getUUID();
JSONObject subsLogObject = new JSONObject();
subsLogObject.put(APIConstants.AuditLogConstants.API_NAME, identifier.getApiName());
subsLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_ID, applicationId);
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, applicationName);
subsLogObject.put(APIConstants.AuditLogConstants.TIER, identifier.getTier());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.SUBSCRIPTION, subsLogObject.toString(),
APIConstants.AuditLogConstants.CREATED, this.username);
workflowResponse = new GeneralWorkflowResponse();
}
if (log.isDebugEnabled()) {
String logMessage = "API Name: " + identifier.getApiName() + ", API Version " + identifier.getVersion()
+ ", Subscription Status: " + subscriptionStatus + " subscribe by " + userId
+ " for app " + applicationName;
log.debug(logMessage);
}
return new SubscriptionResponse(subscriptionStatus, subscriptionUUID, workflowResponse);
} else {
throw new APIMgtResourceNotFoundException("Subscriptions not allowed on APIs in the state: " +
api.getStatus());
}
}
@Override
public SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId,
String groupId) throws APIManagementException {
boolean isValid = validateApplication(userId, applicationId, groupId);
if (!isValid) {
log.error("Application " + applicationId + " is not accessible to user " + userId);
throw new APIManagementException("Application is not accessible to user " + userId);
}
return addSubscription(identifier, userId, applicationId);
}
/**
* Check whether the application is accessible to the specified user
* @param userId username
* @param applicationId application ID
* @param groupId GroupId list of the application
* @return true if the application is accessible by the specified user
*/
private boolean validateApplication(String userId, int applicationId, String groupId) {
try {
return apiMgtDAO.isAppAllowed(applicationId, userId, groupId);
} catch (APIManagementException e) {
log.error("Error occurred while getting user group id for user: " + userId, e);
}
return false;
}
@Override
public String getSubscriptionStatusById(int subscriptionId) throws APIManagementException {
return apiMgtDAO.getSubscriptionStatusById(subscriptionId);
}
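    /**
     * Removes the subscription of the given application to the given API by executing the
     * subscription-deletion workflow. Any pending subscription-creation approval task is cleaned
     * up first, and the gateway key cache is invalidated if gateway key caching is enabled.
     *
     * @param identifier    identifier of the API whose subscription is removed
     * @param userId        user requesting the removal
     * @param applicationId id of the application the subscription belongs to
     * @throws APIManagementException if the removal workflow cannot be executed
     */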
@Override
public void removeSubscription(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
boolean isTenantFlowStarted = false;
String providerTenantDomain = MultitenantUtils.getTenantDomain(APIUtil.
replaceEmailDomainBack(identifier.getProviderName()));
String applicationName = apiMgtDAO.getApplicationNameFromId(applicationId);
try {
if (providerTenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME
.equals(providerTenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(providerTenantDomain, true);
isTenantFlowStarted = true;
}
API api = getAPI(identifier);
SubscriptionWorkflowDTO workflowDTO;
WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
WorkflowExecutor removeSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION);
String workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(identifier, applicationId);
// in a normal flow workflowExtRef is null when workflows are not enabled
if (workflowExtRef == null) {
workflowDTO = new SubscriptionWorkflowDTO();
} else {
workflowDTO = (SubscriptionWorkflowDTO) apiMgtDAO.retrieveWorkflow(workflowExtRef);
                // set the tier name in the workflowDTO only when workflows are enabled
SubscribedAPI subscription = apiMgtDAO
.getSubscriptionById(Integer.parseInt(workflowDTO.getWorkflowReference()));
workflowDTO.setTierName(subscription.getTier().getName());
}
workflowDTO.setApiProvider(identifier.getProviderName());
workflowDTO.setApiContext(api.getContext());
workflowDTO.setApiName(identifier.getApiName());
workflowDTO.setApiVersion(identifier.getVersion());
workflowDTO.setApplicationName(applicationName);
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
workflowDTO.setExternalWorkflowReference(workflowExtRef);
workflowDTO.setSubscriber(userId);
workflowDTO.setCallbackUrl(removeSubscriptionWFExecutor.getCallbackURL());
workflowDTO.setApplicationId(applicationId);
String status = apiMgtDAO.getSubscriptionStatus(identifier, applicationId);
if (APIConstants.SubscriptionStatus.ON_HOLD.equals(status)) {
try {
createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the deletion process
log.warn("Failed to clean pending subscription approval task");
}
}
// update attributes of the new remove workflow to be created
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setExternalWorkflowReference(removeSubscriptionWFExecutor.generateUUID());
removeSubscriptionWFExecutor.execute(workflowDTO);
JSONObject subsLogObject = new JSONObject();
subsLogObject.put(APIConstants.AuditLogConstants.API_NAME, identifier.getApiName());
subsLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_ID, applicationId);
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, applicationName);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.SUBSCRIPTION, subsLogObject.toString(),
APIConstants.AuditLogConstants.DELETED, this.username);
} catch (WorkflowException e) {
String errorMsg = "Could not execute Workflow, " + WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION +
" for apiID " + identifier.getApiName();
handleException(errorMsg, e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (APIUtil.isAPIGatewayKeyCacheEnabled()) {
invalidateCachedKeys(applicationId);
}
if (log.isDebugEnabled()) {
String logMessage = "API Name: " + identifier.getApiName() + ", API Version " +
identifier.getVersion() + " subscription removed from app " + applicationName + " by " + userId;
log.debug(logMessage);
}
}
@Override
public void removeSubscription(APIIdentifier identifier, String userId, int applicationId, String groupId) throws
APIManagementException {
        //Check whether the application is accessible to the logged-in user
boolean isValid = validateApplication(userId, applicationId, groupId);
if (!isValid) {
log.error("Application " + applicationId + " is not accessible to user " + userId);
throw new APIManagementException("Application is not accessible to user " + userId);
}
removeSubscription(identifier, userId, applicationId);
}
/**
* Removes a subscription specified by SubscribedAPI object
*
* @param subscription SubscribedAPI object
* @throws APIManagementException
*/
@Override
public void removeSubscription(SubscribedAPI subscription) throws APIManagementException {
String uuid = subscription.getUUID();
SubscribedAPI subscribedAPI = apiMgtDAO.getSubscriptionByUUID(uuid);
if (subscribedAPI != null) {
Application application = subscribedAPI.getApplication();
APIIdentifier identifier = subscribedAPI.getApiId();
String userId = application.getSubscriber().getName();
removeSubscription(identifier, userId, application.getId());
if (log.isDebugEnabled()) {
String appName = application.getName();
String logMessage =
"API Name: " + identifier.getApiName() + ", API Version " + identifier.getVersion() +
" subscription (uuid : " + uuid + ") removed from app " + appName;
log.debug(logMessage);
}
} else {
throw new APIManagementException("Subscription for UUID:" + uuid +" does not exist.");
}
}
/**
*
* @param applicationId Application ID related cache keys to be cleared
* @throws APIManagementException
*/
private void invalidateCachedKeys(int applicationId) throws APIManagementException {
CacheInvalidator.getInstance().invalidateCacheForApp(applicationId);
}
@Override
public void removeSubscriber(APIIdentifier identifier, String userId)
throws APIManagementException {
throw new UnsupportedOperationException("Unsubscribe operation is not yet implemented");
}
@Override
public void updateSubscriptions(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
API api = getAPI(identifier);
apiMgtDAO.updateSubscriptions(identifier, api.getContext(), applicationId, userId);
}
@Override
public void addComment(APIIdentifier identifier, String commentText, String user) throws APIManagementException {
apiMgtDAO.addComment(identifier, commentText, user);
}
@Override
public org.wso2.carbon.apimgt.api.model.Comment[] getComments(APIIdentifier identifier)
throws APIManagementException {
return apiMgtDAO.getComments(identifier);
}
    /**
     * Adds a new Application from the Store.
     *
     * @param application the {@link org.wso2.carbon.apimgt.api.model.Application} to add
     * @param userId      user id of the application owner
     * @return the id of the newly added application
     * @throws APIManagementException if the application cannot be added
     */
@Override
public int addApplication(Application application, String userId)
throws APIManagementException {
if (application.getName() != null && (application.getName().length() != application.getName().trim().length())) {
handleApplicationNameContainSpacesException("Application name " +
"cannot contain leading or trailing white spaces");
}
String regex = "^[a-zA-Z0-9 ._-]*$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(application.getName());
if (!matcher.find()) {
handleApplicationNameContainsInvalidCharactersException("Application name contains invalid characters");
}
if (APIUtil.isApplicationExist(userId, application.getName(), application.getGroupId())) {
handleResourceAlreadyExistsException(
"A duplicate application already exists by the name - " + application.getName());
}
        //If the callback url is empty, set it to null
if (StringUtils.isBlank(application.getCallbackUrl())) {
application.setCallbackUrl(null);
}
int applicationId = apiMgtDAO.addApplication(application, userId);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.CREATED, this.username);
boolean isTenantFlowStarted = false;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
try {
WorkflowExecutor appCreationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
ApplicationWorkflowDTO appWFDto = new ApplicationWorkflowDTO();
appWFDto.setApplication(application);
appWFDto.setExternalWorkflowReference(appCreationWFExecutor.generateUUID());
appWFDto.setWorkflowReference(String.valueOf(applicationId));
appWFDto.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
appWFDto.setCallbackUrl(appCreationWFExecutor.getCallbackURL());
appWFDto.setStatus(WorkflowStatus.CREATED);
appWFDto.setTenantDomain(tenantDomain);
appWFDto.setTenantId(tenantId);
appWFDto.setUserName(userId);
appWFDto.setCreatedTime(System.currentTimeMillis());
appCreationWFExecutor.execute(appWFDto);
} catch (WorkflowException e) {
//If the workflow execution fails, roll back transaction by removing the application entry.
application.setId(applicationId);
apiMgtDAO.deleteApplication(application);
log.error("Unable to execute Application Creation Workflow", e);
handleException("Unable to execute Application Creation Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (log.isDebugEnabled()) {
log.debug("Application Name: " + application.getName() +" added successfully.");
}
return applicationId;
}
/** Updates an Application identified by its id
*
* @param application Application object to be updated
* @throws APIManagementException
*/
@Override
public void updateApplication(Application application) throws APIManagementException {
Application existingApp;
String uuid = application.getUUID();
if (!StringUtils.isEmpty(uuid)) {
existingApp = apiMgtDAO.getApplicationByUUID(uuid);
if (existingApp != null) {
Set<APIKey> keys = getApplicationKeys(existingApp.getId());
for (APIKey key : keys) {
existingApp.addKey(key);
}
}
application.setId(existingApp.getId());
} else {
existingApp = apiMgtDAO.getApplicationById(application.getId());
}
if (existingApp != null && APIConstants.ApplicationStatus.APPLICATION_CREATED.equals(existingApp.getStatus())) {
throw new APIManagementException("Cannot update the application while it is INACTIVE");
}
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().
equalsIgnoreCase(existingApp.getSubscriber().getName());
} else {
isUserAppOwner = application.getSubscriber().getName().equals(existingApp.getSubscriber().getName());
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + application.getSubscriber().getName() + ", " +
"attempted to update application owned by: " + existingApp.getSubscriber().getName());
}
if (application.getName() != null && (application.getName().length() != application.getName().trim().length())) {
handleApplicationNameContainSpacesException("Application name " +
"cannot contain leading or trailing white spaces");
}
String regex = "^[a-zA-Z0-9 ._-]*$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(application.getName());
if (!matcher.find()) {
handleApplicationNameContainsInvalidCharactersException("Application name contains invalid characters");
}
apiMgtDAO.updateApplication(application);
if (log.isDebugEnabled()) {
log.debug("Successfully updated the Application: " + application.getId() +" in the database.");
}
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.STATUS, existingApp != null ? existingApp.getStatus() : "");
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
try {
invalidateCachedKeys(application.getId());
} catch (APIManagementException ignore) {
//Log and ignore since we do not want to throw exceptions to the front end due to cache invalidation failure.
log.warn("Failed to invalidate Gateway Cache " + ignore.getMessage(), ignore);
}
}
    /**
     * Removes an Application from the API Store.
     *
     * @param application the Application object that represents the application to remove
     * @param username    name of the user requesting the removal
     * @throws APIManagementException if the removal fails
     */
@Override
public void removeApplication(Application application, String username) throws APIManagementException {
String uuid = application.getUUID();
if (application.getId() == 0 && !StringUtils.isEmpty(uuid)) {
application = apiMgtDAO.getApplicationByUUID(uuid);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
}
boolean isTenantFlowStarted = false;
int applicationId = application.getId();
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().equalsIgnoreCase(username);
} else {
isUserAppOwner = application.getSubscriber().getName().equals(username);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + username + ", " +
"attempted to remove application owned by: " + application.getSubscriber().getName());
}
try {
String workflowExtRef;
ApplicationWorkflowDTO workflowDTO;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
isTenantFlowStarted = true;
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
WorkflowExecutor createApplicationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
WorkflowExecutor createProductionRegistrationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
WorkflowExecutor createSandboxRegistrationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
WorkflowExecutor removeApplicationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION);
workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceByApplicationID(application.getId());
// in a normal flow workflowExtRef is null when workflows are not enabled
if (workflowExtRef == null) {
workflowDTO = new ApplicationWorkflowDTO();
} else {
workflowDTO = (ApplicationWorkflowDTO) apiMgtDAO.retrieveWorkflow(workflowExtRef);
}
workflowDTO.setApplication(application);
workflowDTO.setCallbackUrl(removeApplicationWFExecutor.getCallbackURL());
workflowDTO.setUserName(this.username);
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
// Remove from cache first since we won't be able to find active access tokens
// once the application is removed.
invalidateCachedKeys(application.getId());
// clean up pending subscription tasks
Set<Integer> pendingSubscriptions = apiMgtDAO.getPendingSubscriptionsByApplicationId(applicationId);
for (int subscription : pendingSubscriptions) {
try {
workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(subscription);
createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for subscription " + subscription);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending subscription approval task: " + subscription);
}
}
// cleanup pending application registration tasks
String productionKeyStatus = apiMgtDAO
.getRegistrationApprovalState(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
String sandboxKeyStatus = apiMgtDAO
.getRegistrationApprovalState(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
if (WorkflowStatus.CREATED.toString().equals(productionKeyStatus)) {
try {
workflowExtRef = apiMgtDAO
.getRegistrationWFReference(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
createProductionRegistrationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for production key of application "
+ applicationId);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending production key approval task of " + applicationId);
}
}
if (WorkflowStatus.CREATED.toString().equals(sandboxKeyStatus)) {
try {
workflowExtRef = apiMgtDAO
.getRegistrationWFReference(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
createSandboxRegistrationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for sandbox key of application "
+ applicationId);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending sandbox key approval task of " + applicationId);
}
}
if (workflowExtRef != null) {
try {
createApplicationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending application approval task of " + applicationId);
}
}
// update attributes of the new remove workflow to be created
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION);
workflowDTO.setExternalWorkflowReference(removeApplicationWFExecutor.generateUUID());
removeApplicationWFExecutor.execute(workflowDTO);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.DELETED, this.username);
} catch (WorkflowException e) {
String errorMsg = "Could not execute Workflow, " + WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION + " " +
"for applicationID " + application.getId();
handleException(errorMsg, e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (log.isDebugEnabled()) {
String logMessage = "Application Name: " + application.getName() + " successfully removed";
log.debug(logMessage);
}
}
    /**
     * This method is implemented specifically for the REST API, moving the application and data-access
     * logic out of the host object layer. Under the new implementation the requested scopes are passed
     * to this method as tokenScope, and all scope-related logic is handled here, so the host object
     * only needs to pass the required 9 parameters.
     */
@Override
public Map<String, Object> requestApprovalForApplicationRegistration(String userId, String applicationName,
String tokenType, String callbackUrl,
String[] allowedDomains, String validityTime,
String tokenScope, String groupingId,
String jsonString
)
throws APIManagementException {
boolean isTenantFlowStarted = false;
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
int tenantId = MultitenantConstants.INVALID_TENANT_ID;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Unable to retrieve the tenant information of the current user.", e);
}
//checking for authorized scopes
Set<Scope> scopeSet = new LinkedHashSet<Scope>();
List<Scope> authorizedScopes = new ArrayList<Scope>();
String authScopeString;
if (tokenScope != null && tokenScope.length() != 0 &&
!APIConstants.OAUTH2_DEFAULT_SCOPE.equals(tokenScope)) {
scopeSet.addAll(getScopesByScopeKeys(tokenScope, tenantId));
authorizedScopes = getAllowedScopesForUserApplication(userId, scopeSet);
}
if (!authorizedScopes.isEmpty()) {
Set<Scope> authorizedScopeSet = new HashSet<Scope>(authorizedScopes);
StringBuilder scopeBuilder = new StringBuilder();
for (Scope scope : authorizedScopeSet) {
scopeBuilder.append(scope.getKey()).append(' ');
}
authScopeString = scopeBuilder.toString();
} else {
authScopeString = APIConstants.OAUTH2_DEFAULT_SCOPE;
}
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
// initiate WorkflowExecutor
WorkflowExecutor appRegistrationWorkflow = null;
// initiate ApplicationRegistrationWorkflowDTO
ApplicationRegistrationWorkflowDTO appRegWFDto = null;
ApplicationKeysDTO appKeysDto = new ApplicationKeysDTO();
// get APIM application by Application Name and userId.
Application application = ApplicationUtils.retrieveApplication(applicationName, userId, groupingId);
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().equalsIgnoreCase(userId);
} else {
isUserAppOwner = application.getSubscriber().getName().equals(userId);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + application.getSubscriber().getName() + ", " +
"attempted to generate tokens for application owned by: " + userId);
}
// If it is a PRODUCTION application.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
// Initiate the workflow type. By default the simple workflow is executed.
appRegistrationWorkflow =
getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
appRegWFDto =
(ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
// If it is a SANDBOX application.
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
appRegistrationWorkflow =
getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
appRegWFDto =
(ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
} else {
throw new APIManagementException("Invalid Token Type '" + tokenType + "' requested.");
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(callbackUrl)) {
callbackUrl = null;
}
String applicationTokenType = application.getTokenType();
if (StringUtils.isEmpty(application.getTokenType())) {
applicationTokenType = APIConstants.DEFAULT_TOKEN_TYPE;
}
// Build key manager instance and create oAuthAppRequest by jsonString.
OAuthAppRequest request =
ApplicationUtils.createOauthAppRequest(applicationName, null,
callbackUrl, authScopeString, jsonString, applicationTokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.VALIDITY_PERIOD, validityTime);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_CALLBACK_URL, callbackUrl);
// Setting request values in WorkflowDTO - In future we should keep
// Application/OAuthApplication related
// information in the respective entities not in the workflowDTO.
appRegWFDto.setStatus(WorkflowStatus.CREATED);
appRegWFDto.setCreatedTime(System.currentTimeMillis());
appRegWFDto.setTenantDomain(tenantDomain);
appRegWFDto.setTenantId(tenantId);
appRegWFDto.setExternalWorkflowReference(appRegistrationWorkflow.generateUUID());
appRegWFDto.setWorkflowReference(appRegWFDto.getExternalWorkflowReference());
appRegWFDto.setApplication(application);
request.setMappingId(appRegWFDto.getWorkflowReference());
if (!application.getSubscriber().getName().equals(userId)) {
appRegWFDto.setUserName(application.getSubscriber().getName());
} else {
appRegWFDto.setUserName(userId);
}
appRegWFDto.setCallbackUrl(appRegistrationWorkflow.getCallbackURL());
appRegWFDto.setAppInfoDTO(request);
appRegWFDto.setDomainList(allowedDomains);
appRegWFDto.setKeyDetails(appKeysDto);
appRegistrationWorkflow.execute(appRegWFDto);
Map<String, Object> keyDetails = new HashMap<String, Object>();
keyDetails.put("keyState", appRegWFDto.getStatus().toString());
OAuthApplicationInfo applicationInfo = appRegWFDto.getApplicationInfo();
if (applicationInfo != null) {
keyDetails.put("consumerKey", applicationInfo.getClientId());
keyDetails.put("consumerSecret", applicationInfo.getClientSecret());
keyDetails.put("appDetails", applicationInfo.getJsonString());
}
// There can be instances where generating the Application Token is
// not required. In those cases,
// token info will have nothing.
AccessTokenInfo tokenInfo = appRegWFDto.getAccessTokenInfo();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", tokenInfo.getValidityPeriod());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
keyDetails.put("tokenScope", tokenInfo.getScopes());
}
JSONObject appLogObject = new JSONObject();
appLogObject.put("Generated keys for application", application.getName());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyDetails;
} catch (WorkflowException e) {
log.error("Could not execute Workflow", e);
throw new APIManagementException(e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
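/*
 * Illustrative usage sketch (not part of the original source; variable names and argument values are
 * hypothetical): a caller such as the REST API layer is expected to invoke this method with the nine
 * parameters and read the returned key details map, e.g.
 *
 *   Map<String, Object> keyDetails = consumer.requestApprovalForApplicationRegistration(
 *           "john", "CalculatorApp", APIConstants.API_KEY_TYPE_PRODUCTION, null,
 *           new String[] {"ALL"}, "3600", "default", null, "{}");
 *   String keyState = (String) keyDetails.get("keyState");
 *   String consumerKey = (String) keyDetails.get("consumerKey");   // present once keys have been generated
 *   String accessToken = (String) keyDetails.get("accessToken");   // present only if a token was issued
 */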
@Override
public Map<String, Object> requestApprovalForApplicationRegistrationByApplicationId(
Map<String, Object> appInfo) throws APIManagementException {
if (appInfo == null || appInfo.isEmpty()) {
log.error("Application information is not provided to request approval For Application Registration");
return new HashMap<String, Object>(0);
}
boolean isTenantFlowStarted = false;
String username = appInfo.get("username").toString();
String scopes = appInfo.get("scopes").toString();
String applicationName = appInfo.get("applicationName").toString();
String groupingId = appInfo.get("groupingId").toString();
String tokenType = appInfo.get("tokenType").toString();
String callbackUrl = appInfo.get("callbackUrl").toString();
String jsonParams = appInfo.get("jsonParams").toString();
String[] allowedDomains = (String[]) appInfo.get("allowedDomains");
String validityTime = appInfo.get("validityPeriod").toString();
int applicationId = Integer.valueOf(appInfo.get("applicationId").toString());
String tenantDomain = MultitenantUtils.getTenantDomain(username);
int tenantId = MultitenantConstants.INVALID_TENANT_ID;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
} catch (UserStoreException e) {
String msg = "Unable to retrieve the tenant information of the current user.";
log.error(msg, e);
throw new APIManagementException(msg, e);
}
//checking for authorized scopes
Set<Scope> scopeSet = new LinkedHashSet<Scope>();
List<Scope> authorizedScopes = new ArrayList<Scope>();
String authScopeString;
if (scopes != null && scopes.length() != 0 && !APIConstants.OAUTH2_DEFAULT_SCOPE.equals(scopes)) {
scopeSet.addAll(getScopesByScopeKeys(scopes, tenantId));
authorizedScopes = getAllowedScopesForUserApplication(username, scopeSet);
}
if (!authorizedScopes.isEmpty()) {
StringBuilder scopeBuilder = new StringBuilder();
for (Scope scope : authorizedScopes) {
scopeBuilder.append(scope.getKey()).append(' ');
}
authScopeString = scopeBuilder.toString();
} else {
authScopeString = APIConstants.OAUTH2_DEFAULT_SCOPE;
}
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
// initiate WorkflowExecutor
WorkflowExecutor appRegistrationWorkflow = null;
// initiate ApplicationRegistrationWorkflowDTO
ApplicationRegistrationWorkflowDTO appRegWFDto = null;
ApplicationKeysDTO appKeysDto = new ApplicationKeysDTO();
// get APIM application by Application Id.
Application application = ApplicationUtils.retrieveApplicationById(applicationId);
// If it is a PRODUCTION application.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
// Initiate the workflow type. By default the simple workflow is executed.
appRegistrationWorkflow = getWorkflowExecutor(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
appRegWFDto = (ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
// If it is a SANDBOX application.
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
appRegistrationWorkflow = getWorkflowExecutor(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
appRegWFDto = (ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
} else {
throw new APIManagementException("Invalid Token Type '" + tokenType + "' requested.");
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(callbackUrl)) {
callbackUrl = null;
}
String applicationTokenType = application.getTokenType();
if (StringUtils.isEmpty(application.getTokenType())) {
applicationTokenType = APIConstants.DEFAULT_TOKEN_TYPE;
}
// Build key manager instance and create oAuthAppRequest by jsonString.
OAuthAppRequest request = ApplicationUtils
.createOauthAppRequest(applicationName, null, callbackUrl, authScopeString, jsonParams,
applicationTokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.VALIDITY_PERIOD, validityTime);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_CALLBACK_URL, callbackUrl);
// Setting request values in WorkflowDTO - In future we should keep
// Application/OAuthApplication related
// information in the respective entities not in the workflowDTO.
appRegWFDto.setStatus(WorkflowStatus.CREATED);
appRegWFDto.setCreatedTime(System.currentTimeMillis());
appRegWFDto.setTenantDomain(tenantDomain);
appRegWFDto.setTenantId(tenantId);
appRegWFDto.setExternalWorkflowReference(appRegistrationWorkflow.generateUUID());
appRegWFDto.setWorkflowReference(appRegWFDto.getExternalWorkflowReference());
appRegWFDto.setApplication(application);
request.setMappingId(appRegWFDto.getWorkflowReference());
if (!application.getSubscriber().getName().equals(username)) {
appRegWFDto.setUserName(application.getSubscriber().getName());
} else {
appRegWFDto.setUserName(username);
}
appRegWFDto.setCallbackUrl(appRegistrationWorkflow.getCallbackURL());
appRegWFDto.setAppInfoDTO(request);
appRegWFDto.setDomainList(allowedDomains);
appRegWFDto.setKeyDetails(appKeysDto);
appRegistrationWorkflow.execute(appRegWFDto);
Map<String, Object> keyDetails = new HashMap<String, Object>();
keyDetails.put("keyState", appRegWFDto.getStatus().toString());
OAuthApplicationInfo applicationInfo = appRegWFDto.getApplicationInfo();
if (applicationInfo != null) {
keyDetails.put("consumerKey", applicationInfo.getClientId());
keyDetails.put("consumerSecret", applicationInfo.getClientSecret());
keyDetails.put("appDetails", applicationInfo.getJsonString());
}
// There can be instances where generating the Application Token is
// not required. In those cases,
// token info will have nothing.
AccessTokenInfo tokenInfo = appRegWFDto.getAccessTokenInfo();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", tokenInfo.getValidityPeriod());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
keyDetails.put("tokenScope", tokenInfo.getScopes());
}
JSONObject appLogObject = new JSONObject();
appLogObject.put("Generated keys for application", application.getName());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyDetails;
} catch (WorkflowException e) {
log.error("Could not execute Workflow", e);
throw new APIManagementException("Could not execute Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
private static List<Scope> getAllowedScopesForUserApplication(String username,
Set<Scope> reqScopeSet) {
String[] userRoles = null;
org.wso2.carbon.user.api.UserStoreManager userStoreManager = null;
List<Scope> authorizedScopes = new ArrayList<Scope>();
try {
RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(MultitenantUtils.getTenantDomain(username));
userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager();
userRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername(username));
} catch (org.wso2.carbon.user.api.UserStoreException e) {
// Log and return since we do not want to stop issuing the token in
// case of scope validation failures.
log.error("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
}
List<String> userRoleList;
if (userRoles != null) {
userRoleList = new ArrayList<String>(Arrays.asList(userRoles));
} else {
userRoleList = Collections.emptyList();
}
//Iterate the requested scopes list.
for (Scope scope : reqScopeSet) {
//Get the set of roles associated with the requested scope.
String roles = scope.getRoles();
//If the scope has been defined in the context of the App and if roles have been defined for the scope
if (roles != null && roles.length() != 0) {
List<String> roleList =
new ArrayList<String>(Arrays.asList(roles.replaceAll(" ", EMPTY_STRING).split(",")));
//Check if user has at least one of the roles associated with the scope
roleList.retainAll(userRoleList);
if (!roleList.isEmpty()) {
authorizedScopes.add(scope);
}
}
}
return authorizedScopes;
}
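/*
 * Worked example (illustrative, not from the original source): if the requested scopes are
 * read_scope (roles "admin,manager") and write_scope (roles "admin"), and the user only carries the
 * "manager" role, the role intersection above keeps read_scope and drops write_scope, so the returned
 * list is [read_scope]. Scopes with no roles defined are never added here; when nothing is authorized
 * the callers fall back to the default scope.
 */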
@Override
public Map<String, String> completeApplicationRegistration(String userId, String applicationName, String tokenType,
String tokenScope, String groupingId)
throws APIManagementException {
Application application = apiMgtDAO.getApplicationByName(applicationName, userId, groupingId);
String status = apiMgtDAO.getRegistrationApprovalState(application.getId(), tokenType);
Map<String, String> keyDetails = null;
if (!application.getSubscriber().getName().equals(userId)) {
userId = application.getSubscriber().getName();
}
String workflowReference = apiMgtDAO.getWorkflowReference(applicationName, userId);
if (workflowReference != null) {
WorkflowDTO workflowDTO = null;
// Creating workflowDTO for the correct key type.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
}
if (workflowDTO != null) {
// Set the workflow reference in the workflow dto and the populate method will fill in other details
// using the persisted request.
ApplicationRegistrationWorkflowDTO registrationWorkflowDTO = (ApplicationRegistrationWorkflowDTO)
workflowDTO;
registrationWorkflowDTO.setExternalWorkflowReference(workflowReference);
if (APIConstants.AppRegistrationStatus.REGISTRATION_APPROVED.equals(status)) {
apiMgtDAO.populateAppRegistrationWorkflowDTO(registrationWorkflowDTO);
try {
AbstractApplicationRegistrationWorkflowExecutor.dogenerateKeysForApplication
(registrationWorkflowDTO);
AccessTokenInfo tokenInfo = registrationWorkflowDTO.getAccessTokenInfo();
OAuthApplicationInfo oauthApp = registrationWorkflowDTO.getApplicationInfo();
keyDetails = new HashMap<String, String>();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oauthApp.getClientId());
keyDetails.put("consumerSecret", oauthApp.getClientSecret());
keyDetails.put("appDetails", oauthApp.getJsonString());
} catch (APIManagementException e) {
APIUtil.handleException("Error occurred while Creating Keys.", e);
}
}
}
}
return keyDetails;
}
@Override
public Map<String, String> completeApplicationRegistration(String userId, int applicationId,
String tokenType, String tokenScope, String groupingId) throws APIManagementException {
Application application = apiMgtDAO.getApplicationById(applicationId);
String status = apiMgtDAO.getRegistrationApprovalState(application.getId(), tokenType);
Map<String, String> keyDetails = null;
if (!application.getSubscriber().getName().equals(userId)) {
userId = application.getSubscriber().getName();
}
//todo get workflow reference by appId
String workflowReference = apiMgtDAO.getWorkflowReferenceByApplicationId(application.getId(), userId);
if (workflowReference != null) {
WorkflowDTO workflowDTO = null;
// Creating workflowDTO for the correct key type.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
}
if (workflowDTO != null) {
// Set the workflow reference in the workflow dto and the populate method will fill in other details
// using the persisted request.
ApplicationRegistrationWorkflowDTO registrationWorkflowDTO = (ApplicationRegistrationWorkflowDTO) workflowDTO;
registrationWorkflowDTO.setExternalWorkflowReference(workflowReference);
if (APIConstants.AppRegistrationStatus.REGISTRATION_APPROVED.equals(status)) {
apiMgtDAO.populateAppRegistrationWorkflowDTO(registrationWorkflowDTO);
try {
AbstractApplicationRegistrationWorkflowExecutor
.dogenerateKeysForApplication(registrationWorkflowDTO);
AccessTokenInfo tokenInfo = registrationWorkflowDTO.getAccessTokenInfo();
OAuthApplicationInfo oauthApp = registrationWorkflowDTO.getApplicationInfo();
keyDetails = new HashMap<String, String>();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oauthApp.getClientId());
keyDetails.put("consumerSecret", oauthApp.getClientSecret());
keyDetails.put("accessallowdomains", registrationWorkflowDTO.getDomainList());
keyDetails.put("appDetails", oauthApp.getJsonString());
} catch (APIManagementException e) {
APIUtil.handleException("Error occurred while Creating Keys.", e);
}
}
}
}
return keyDetails;
}
/**
* Returns the application with the given name that belongs to the given subscriber and group.
*
* @param userId APIM subscriber user ID.
* @param ApplicationName APIM application name.
* @return Application The matching application, or null if no such application exists.
* @throws APIManagementException
*/
@Override
public Application getApplicationsByName(String userId, String ApplicationName, String groupingId) throws
APIManagementException {
Application application = apiMgtDAO.getApplicationByName(ApplicationName, userId,groupingId);
if (application != null) {
checkAppAttributes(application, userId);
}
application = apiMgtDAO.getApplicationWithOAuthApps(ApplicationName, userId, groupingId);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return application;
}
/**
* Returns the application corresponding to the given id.
*
* @param id Id of the Application.
* @return Application The application corresponding to the id, or null if no such application exists.
* @throws APIManagementException
*/
@Override
public Application getApplicationById(int id) throws APIManagementException {
Application application = apiMgtDAO.getApplicationById(id);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return application;
}
/*
* @see super.getApplicationById(int id, String userId, String groupId)
*/
@Override
public Application getApplicationById(int id, String userId, String groupId) throws APIManagementException {
Application application = apiMgtDAO.getApplicationById(id, userId, groupId);
if (application != null) {
checkAppAttributes(application, userId);
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return application;
}
/**
* Gets the status of the application creation process for the given application Id.
*
* @param applicationId Id of the Application.
* @return String The application creation status.
* @throws APIManagementException
*/
@Override
public String getApplicationStatusById(int applicationId) throws APIManagementException {
return apiMgtDAO.getApplicationStatusById(applicationId);
}
@Override
public boolean isApplicationTokenExists(String accessToken) throws APIManagementException {
return apiMgtDAO.isAccessTokenExists(accessToken);
}
@Override
public Set<SubscribedAPI> getSubscribedIdentifiers(Subscriber subscriber, APIIdentifier identifier, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPISet = new HashSet<>();
Set<SubscribedAPI> subscribedAPIs = getSubscribedAPIs(subscriber, groupingId);
for (SubscribedAPI api : subscribedAPIs) {
if (api.getApiId().equals(identifier)) {
Set<APIKey> keys = getApplicationKeys(api.getApplication().getId());
for (APIKey key : keys) {
api.addKey(key);
}
subscribedAPISet.add(api);
}
}
return subscribedAPISet;
}
/**
* Returns the set of tier names that are denied to the current user.
*
* @return Set<String> Names of the denied tiers.
*/
@Override
public Set<String> getDeniedTiers() throws APIManagementException {
// '0' is passed as argument whenever tenant id of logged in user is needed
return getDeniedTiers(0);
}
/**
* Returns the set of tier names that are denied to the current user for the given API provider tenant.
*
* @param apiProviderTenantId tenant id of the API provider
* @return Set<String> Names of the denied tiers.
*/
@Override
public Set<String> getDeniedTiers(int apiProviderTenantId) throws APIManagementException {
Set<String> deniedTiers = new HashSet<String>();
String[] currentUserRoles;
if (apiProviderTenantId == 0) {
apiProviderTenantId = tenantId;
}
try {
if (apiProviderTenantId != 0) {
/* Get the roles of the Current User */
currentUserRoles = ((UserRegistry) ((UserAwareAPIConsumer) this).registry).
getUserRealm().getUserStoreManager().getRoleListOfUser(((UserRegistry) this.registry)
.getUserName());
Set<TierPermissionDTO> tierPermissions;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermissions = apiMgtDAO.getThrottleTierPermissions(apiProviderTenantId);
} else {
tierPermissions = apiMgtDAO.getTierPermissions(apiProviderTenantId);
}
for (TierPermissionDTO tierPermission : tierPermissions) {
String type = tierPermission.getPermissionType();
List<String> currentRolesList = new ArrayList<String>(Arrays.asList(currentUserRoles));
List<String> roles = new ArrayList<String>(Arrays.asList(tierPermission.getRoles()));
currentRolesList.retainAll(roles);
if (APIConstants.TIER_PERMISSION_ALLOW.equals(type)) {
/* Current User is not allowed for this Tier*/
if (currentRolesList.isEmpty()) {
deniedTiers.add(tierPermission.getTierName());
}
} else {
/* Current User is denied for this Tier*/
if (currentRolesList.size() > 0) {
deniedTiers.add(tierPermission.getTierName());
}
}
}
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
log.error("cannot retrieve user role list for tenant" + tenantDomain, e);
}
return deniedTiers;
}
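/*
 * Note on the permission semantics above (illustrative, not from the original source): for a tier
 * permission of type APIConstants.TIER_PERMISSION_ALLOW with roles [gold], a user holding none of those
 * roles gets the tier denied; for a permission of any other (deny) type with roles [internal], a user
 * holding at least one of those roles gets the tier denied. Tiers without a permission entry are never
 * added to the denied set.
 */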
@Override
public Set<TierPermission> getTierPermissions() throws APIManagementException {
Set<TierPermission> tierPermissions = new HashSet<TierPermission>();
if (tenantId != 0) {
Set<TierPermissionDTO> tierPermissionDtos;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermissionDtos = apiMgtDAO.getThrottleTierPermissions(tenantId);
} else {
tierPermissionDtos = apiMgtDAO.getTierPermissions(tenantId);
}
for (TierPermissionDTO tierDto : tierPermissionDtos) {
TierPermission tierPermission = new TierPermission(tierDto.getTierName());
tierPermission.setRoles(tierDto.getRoles());
tierPermission.setPermissionType(tierDto.getPermissionType());
tierPermissions.add(tierPermission);
}
}
return tierPermissions;
}
/**
* Checks whether the given tier is denied for the current user.
*
* @param tierName Name of the tier.
* @return true if the tier is denied for the user, false otherwise.
* @throws APIManagementException if failed to get the tiers
*/
@Override
public boolean isTierDeneid(String tierName) throws APIManagementException {
String[] currentUserRoles;
try {
if (tenantId != 0) {
/* Get the roles of the Current User */
currentUserRoles = ((UserRegistry) ((UserAwareAPIConsumer) this).registry).
getUserRealm().getUserStoreManager().getRoleListOfUser(((UserRegistry) this.registry).getUserName());
TierPermissionDTO tierPermission;
if(APIUtil.isAdvanceThrottlingEnabled()){
tierPermission = apiMgtDAO.getThrottleTierPermission(tierName, tenantId);
}else{
tierPermission = apiMgtDAO.getTierPermission(tierName, tenantId);
}
if (tierPermission == null) {
return false;
} else {
List<String> currentRolesList = new ArrayList<String>(Arrays.asList(currentUserRoles));
List<String> roles = new ArrayList<String>(Arrays.asList(tierPermission.getRoles()));
currentRolesList.retainAll(roles);
if (APIConstants.TIER_PERMISSION_ALLOW.equals(tierPermission.getPermissionType())) {
if (currentRolesList.isEmpty()) {
return true;
}
} else {
if (currentRolesList.size() > 0) {
return true;
}
}
}
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
log.error("cannot retrieve user role list for tenant" + tenantDomain, e);
}
return false;
}
private boolean isTenantDomainNotMatching(String tenantDomain) {
if (this.tenantDomain != null) {
return !(this.tenantDomain.equals(tenantDomain));
}
return true;
}
@Override
public Set<API> searchAPI(String searchTerm, String searchType, String tenantDomain)
throws APIManagementException {
return null;
}
public Set<Scope> getScopesBySubscribedAPIs(List<APIIdentifier> identifiers)
throws APIManagementException {
return apiMgtDAO.getScopesBySubscribedAPIs(identifiers);
}
public String getScopesByToken(String accessToken) throws APIManagementException {
return null;
}
public Set<Scope> getScopesByScopeKeys(String scopeKeys, int tenantId)
throws APIManagementException {
return apiMgtDAO.getScopesByScopeKeys(scopeKeys, tenantId);
}
@Override
public String getGroupId(int appId) throws APIManagementException {
return apiMgtDAO.getGroupId(appId);
}
@Override
public String[] getGroupIds(String response) throws APIManagementException {
String groupingExtractorClass = APIUtil.getGroupingExtractorImplementation();
if (groupingExtractorClass != null) {
try {
LoginPostExecutor groupingExtractor = (LoginPostExecutor) APIUtil.getClassForName
(groupingExtractorClass).newInstance();
// Switch between the 2.1.0 behaviour (single group id) and the 2.2.0 behaviour (multiple group ids).
if (APIUtil.isMultiGroupAppSharingEnabled()) {
NewPostLoginExecutor newGroupIdListExtractor = (NewPostLoginExecutor) groupingExtractor;
return newGroupIdListExtractor.getGroupingIdentifierList(response);
} else {
String groupId = groupingExtractor.getGroupingIdentifiers(response);
return new String[] {groupId};
}
} catch (ClassNotFoundException e) {
String msg = groupingExtractorClass + " was not found at runtime";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (ClassCastException e) {
String msg = "Cannot cast " + groupingExtractorClass + " NewPostLoginExecutor";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (IllegalAccessException e) {
String msg = "Error occurred while invocation of getGroupingIdentifier method";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (InstantiationException e) {
String msg = "Error occurred while instantiating " + groupingExtractorClass + " class";
log.error(msg, e);
throw new APIManagementException(msg, e);
}
}
return null;
}
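/*
 * Hypothetical extractor sketch (assumption, not part of the original source; the class name and the
 * returned values are illustrative only): a grouping extractor configured for the store is expected to
 * expose the two shapes used above, roughly as follows.
 *
 *   public class OrganizationGroupIdExtractor implements NewPostLoginExecutor {
 *       public String getGroupingIdentifiers(String loginResponse) {
 *           // single-group mode: derive one group id from the login response
 *           return "engineering";
 *       }
 *       public String[] getGroupingIdentifierList(String loginResponse) {
 *           // multi-group mode: derive several group ids from the login response
 *           return new String[] {"engineering", "qa"};
 *       }
 *   }
 */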
/**
* Returns all applications associated with the given subscriber, groupingId and search criteria.
*
* @param subscriber Subscriber.
* @param groupingId The groupId to which the applications must belong.
* @param start      The start index of the result page.
* @param offset     The offset.
* @param search     The search string.
* @param sortColumn The sort column.
* @param sortOrder  The sort order.
* @return Application[] The Applications.
* @throws APIManagementException
*/
@Override
public Application[] getApplicationsWithPagination(Subscriber subscriber, String groupingId, int start , int offset
, String search, String sortColumn, String sortOrder)
throws APIManagementException {
return apiMgtDAO.getApplicationsWithPagination(subscriber, groupingId, start, offset,
search, sortColumn, sortOrder);
}
/**
* Returns all applications associated with given subscriber and groupingId.
*
* @param subscriber The subscriber.
* @param groupingId The groupId to which the applications must belong.
* @return Application[] Array of applications.
* @throws APIManagementException
*/
@Override
public Application[] getApplications(Subscriber subscriber, String groupingId)
throws APIManagementException {
Application[] applications = apiMgtDAO.getApplications(subscriber, groupingId);
for (Application application : applications) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return applications;
}
/**
* Returns all API keys associated with given application id.
*
* @param applicationId The id of the application.
* @return Set<APIKey> Set of API keys of the application.
* @throws APIManagementException
*/
protected Set<APIKey> getApplicationKeys(int applicationId) throws APIManagementException {
Set<APIKey> apiKeys = new HashSet<APIKey>();
APIKey productionKey = getApplicationKey(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
if (productionKey != null) {
apiKeys.add(productionKey);
} else {
productionKey = apiMgtDAO.getKeyStatusOfApplication(APIConstants.API_KEY_TYPE_PRODUCTION, applicationId);
if (productionKey != null) {
productionKey.setType(APIConstants.API_KEY_TYPE_PRODUCTION);
apiKeys.add(productionKey);
}
}
APIKey sandboxKey = getApplicationKey(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
if (sandboxKey != null) {
apiKeys.add(sandboxKey);
} else {
sandboxKey = apiMgtDAO.getKeyStatusOfApplication(APIConstants.API_KEY_TYPE_SANDBOX, applicationId);
if (sandboxKey != null) {
sandboxKey.setType(APIConstants.API_KEY_TYPE_SANDBOX);
apiKeys.add(sandboxKey);
}
}
return apiKeys;
}
/**
* Returns the key associated with given application id and key type.
*
* @param applicationId Id of the Application.
* @param keyType The type of key.
* @return APIKey The key of the application.
* @throws APIManagementException
*/
protected APIKey getApplicationKey(int applicationId, String keyType) throws APIManagementException {
String consumerKey = apiMgtDAO.getConsumerkeyByApplicationIdAndKeyType(String.valueOf(applicationId), keyType);
if (StringUtils.isNotEmpty(consumerKey)) {
String consumerKeyStatus = apiMgtDAO.getKeyStatusOfApplication(keyType, applicationId).getState();
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
OAuthApplicationInfo oAuthApplicationInfo = keyManager.retrieveApplication(consumerKey);
AccessTokenInfo tokenInfo = keyManager.getAccessTokenByConsumerKey(consumerKey);
APIKey apiKey = new APIKey();
apiKey.setConsumerKey(consumerKey);
apiKey.setType(keyType);
apiKey.setState(consumerKeyStatus);
if (oAuthApplicationInfo != null) {
apiKey.setConsumerSecret(oAuthApplicationInfo.getClientSecret());
apiKey.setCallbackUrl(oAuthApplicationInfo.getCallBackURL());
if (oAuthApplicationInfo.getParameter(APIConstants.JSON_GRANT_TYPES) != null) {
apiKey.setGrantTypes(oAuthApplicationInfo.getParameter(APIConstants.JSON_GRANT_TYPES).toString());
}
}
if (tokenInfo != null) {
apiKey.setAccessToken(tokenInfo.getAccessToken());
apiKey.setValidityPeriod(tokenInfo.getValidityPeriod());
apiKey.setTokenScope(getScopeString(tokenInfo.getScopes()));
} else {
if (log.isDebugEnabled()) {
log.debug("Access token does not exist for Consumer Key: " + consumerKey);
}
}
return apiKey;
}
if (log.isDebugEnabled()) {
log.debug("Consumer key does not exist for Application Id: " + applicationId + " Key Type: " + keyType);
}
return null;
}
/**
* Returns a single string containing the provided array of scopes.
*
* @param scopes The array of scopes.
* @return String Single string containing the provided array of scopes.
*/
private String getScopeString(String[] scopes) {
return StringUtils.join(scopes, " ");
}
@Override
public Application[] getLightWeightApplications(Subscriber subscriber, String groupingId) throws
APIManagementException {
return apiMgtDAO.getLightWeightApplications(subscriber, groupingId);
}
/**
* Updates the OAuth client of the given application.
*
* @param userId Subscriber name.
* @param applicationName Name of the Application.
* @param tokenType Token type (PRODUCTION | SANDBOX).
* @param callbackUrl Callback URL.
* @param allowedDomains Allowed domains for the token.
* @param validityTime Validity time period.
* @param tokenScope Scopes for the requested tokens.
* @param groupingId Group Id of the application.
* @param jsonString JSON string carrying additional OAuth application parameters.
* @return OAuthApplicationInfo Updated OAuth application information.
* @throws APIManagementException
*/
@Override
public OAuthApplicationInfo updateAuthClient(String userId, String applicationName,
String tokenType,
String callbackUrl, String[] allowedDomains,
String validityTime,
String tokenScope,
String groupingId,
String jsonString) throws APIManagementException {
boolean tenantFlowStarted = false;
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
tenantFlowStarted = true;
}
Application application = ApplicationUtils.retrieveApplication(applicationName, userId, groupingId);
final String subscriberName = application.getSubscriber().getName();
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = subscriberName.equalsIgnoreCase(userId);
} else {
isUserAppOwner = subscriberName.equals(userId);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + userId + ", attempted to update OAuth application " +
"owned by: " + subscriberName);
}
//Create OauthAppRequest object by passing json String.
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, null, callbackUrl,
tokenScope, jsonString, application.getTokenType());
oauthAppRequest.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
String consumerKey = apiMgtDAO.getConsumerKeyForApplicationKeyType(applicationName, userId, tokenType,
groupingId);
oauthAppRequest.getOAuthApplicationInfo().setClientId(consumerKey);
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//call update method.
OAuthApplicationInfo updatedAppInfo = keyManager.updateApplication(oauthAppRequest);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, updatedAppInfo.getClientName());
appLogObject.put("Updated Oauth app with Call back URL", callbackUrl);
appLogObject.put("Updated Oauth app with grant types", jsonString);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return updatedAppInfo;
} finally {
if (tenantFlowStarted) {
endTenantFlow();
}
}
}
/**
* Updates the OAuth client of the application identified by the given application id.
*
* @param userId Subscriber name.
* @param applicationName Name of the Application.
* @param applicationId Id of the Application.
* @param tokenType Token type (PRODUCTION | SANDBOX).
* @param callbackUrl Callback URL.
* @param allowedDomains Allowed domains for the token.
* @param validityTime Validity time period.
* @param tokenScope Scopes for the requested tokens.
* @param groupingId Group Id of the application.
* @param jsonString JSON string carrying additional OAuth application parameters.
* @return OAuthApplicationInfo Updated OAuth application information.
* @throws APIManagementException
*/
@Override
public OAuthApplicationInfo updateAuthClientByAppId(String userId, String applicationName, int applicationId,
String tokenType, String callbackUrl, String[] allowedDomains, String validityTime, String tokenScope,
String groupingId, String jsonString) throws APIManagementException {
boolean tenantFlowStarted = false;
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
tenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
Application application = ApplicationUtils.retrieveApplicationById(applicationId);
//Create OauthAppRequest object by passing json String.
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, null, callbackUrl,
tokenScope, jsonString, application.getTokenType());
oauthAppRequest.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
String consumerKey = apiMgtDAO.getConsumerKeyForApplicationKeyType(applicationId, userId, tokenType,
groupingId);
oauthAppRequest.getOAuthApplicationInfo().setClientId(consumerKey);
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//call update method.
OAuthApplicationInfo updatedAppInfo = keyManager.updateApplication(oauthAppRequest);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, updatedAppInfo.getClientName());
appLogObject.put("Updated Oauth app with Call back URL", callbackUrl);
appLogObject.put("Updated Oauth app with grant types", jsonString);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return updatedAppInfo;
} finally {
if (tenantFlowStarted) {
endTenantFlow();
}
}
}
/**
* Deletes the OAuth application identified by the given consumer key.
*
* @param consumerKey Consumer key of the OAuth application.
* @throws APIManagementException
*/
@Override
public void deleteOAuthApplication(String consumerKey) throws APIManagementException {
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//delete oAuthApplication by calling key manager implementation
keyManager.deleteApplication(consumerKey);
Map<String, String> applicationIdAndTokenTypeMap =
apiMgtDAO.getApplicationIdAndTokenTypeByConsumerKey(consumerKey);
if (applicationIdAndTokenTypeMap != null) {
String applicationId = applicationIdAndTokenTypeMap.get("application_id");
String tokenType = applicationIdAndTokenTypeMap.get("token_type");
if (applicationId != null && tokenType != null) {
apiMgtDAO.deleteApplicationKeyMappingByConsumerKey(consumerKey);
apiMgtDAO.deleteApplicationRegistration(applicationId, tokenType);
}
}
}
@Override
public Application[] getApplicationsByOwner(String userId) throws APIManagementException {
return apiMgtDAO.getApplicationsByOwner(userId);
}
@Override
public boolean updateApplicationOwner(String userId, Application application) throws APIManagementException {
boolean isAppUpdated = false;
try {
RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(MultitenantUtils.getTenantDomain(username));
UserStoreManager userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager();
String oldUserName = application.getSubscriber().getName();
String[] oldUserRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername
(oldUserName));
String[] newUserRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername
(userId));
List<String> roleList = new ArrayList<String>();
roleList.addAll(Arrays.asList(newUserRoles));
for (String role : oldUserRoles) {
if (role.contains(application.getName())) {
roleList.add(role);
}
}
String[] roleArr = roleList.toArray(new String[roleList.size()]);
APIManagerConfiguration config = getAPIManagerConfiguration();
String serverURL = config.getFirstProperty(APIConstants.AUTH_MANAGER_URL) + "UserAdmin";
String adminUsername = config.getFirstProperty(APIConstants.AUTH_MANAGER_USERNAME);
String adminPassword = config.getFirstProperty(APIConstants.AUTH_MANAGER_PASSWORD);
UserAdminStub userAdminStub = new UserAdminStub(serverURL);
CarbonUtils.setBasicAccessSecurityHeaders(adminUsername, adminPassword, userAdminStub._getServiceClient());
userAdminStub.updateRolesOfUser(userId, roleArr);
isAppUpdated = true;
} catch (org.wso2.carbon.user.api.UserStoreException e) {
handleException("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
} catch (RemoteException e) {
handleException("Server couldn't establish connection with auth manager ", e);
} catch (UserAdminUserAdminException e) {
handleException("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
}
if (isAppUpdated) {
isAppUpdated = apiMgtDAO.updateApplicationOwner(userId, application);
}
//TODO: update the OAuth application owner once the OAuth component supports updating the owner
return isAppUpdated;
}
public JSONObject resumeWorkflow(Object[] args) {
JSONObject row = new JSONObject();
if (args != null && APIUtil.isStringArray(args)) {
String workflowReference = (String) args[0];
String status = (String) args[1];
String description = null;
if (args.length > 2 && args[2] != null) {
description = (String) args[2];
}
boolean isTenantFlowStarted = false;
try {
// if (workflowReference != null) {
WorkflowDTO workflowDTO = apiMgtDAO.retrieveWorkflow(workflowReference);
if (workflowDTO == null) {
log.error("Could not find workflow for reference " + workflowReference);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", "Could not find workflow for reference " + workflowReference);
return row;
}
String tenantDomain = workflowDTO.getTenantDomain();
if (tenantDomain != null && !org.wso2.carbon.utils.multitenancy.MultitenantConstants
.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
workflowDTO.setWorkflowDescription(description);
workflowDTO.setStatus(WorkflowStatus.valueOf(status));
String workflowType = workflowDTO.getWorkflowType();
WorkflowExecutor workflowExecutor;
try {
workflowExecutor = getWorkflowExecutor(workflowType);
workflowExecutor.complete(workflowDTO);
} catch (WorkflowException e) {
throw new APIManagementException(e);
}
row.put("error", Boolean.FALSE);
row.put("statusCode", 200);
row.put("message", "Invoked workflow completion successfully.");
// }
} catch (IllegalArgumentException e) {
String msg = "Illegal argument provided. Valid values for status are APPROVED and REJECTED.";
log.error(msg, e);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", msg);
} catch (APIManagementException e) {
String msg = "Error while resuming the workflow. ";
log.error(msg, e);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", msg + e.getMessage());
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
return row;
}
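/*
 * Illustrative usage sketch (not part of the original source; variable names are hypothetical): the args
 * array is expected to hold the workflow reference, the new status and an optional description, in that
 * order, e.g.
 *
 *   JSONObject result = consumer.resumeWorkflow(new Object[] {
 *           "b3e4f6c8-workflow-ref", "APPROVED", "Approved by the tenant admin"});
 *   // result carries "error", "statusCode" and "message" entries describing the outcome.
 */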
protected void endTenantFlow() {
PrivilegedCarbonContext.endTenantFlow();
}
protected boolean startTenantFlowForTenantDomain(String tenantDomain) {
boolean isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
return isTenantFlowStarted;
}
/**
* Returns a workflow executor
*
* @param workflowType Workflow executor type
* @return WorkflowExecutor of given type
* @throws WorkflowException if an error occurred while getting WorkflowExecutor
*/
protected WorkflowExecutor getWorkflowExecutor(String workflowType) throws WorkflowException {
return WorkflowExecutorFactory.getInstance().getWorkflowExecutor(workflowType);
}
@Override
public boolean isMonetizationEnabled(String tenantDomain) throws APIManagementException {
JSONObject apiTenantConfig = null;
try {
String content = apimRegistryService.getConfigRegistryResourceContent(tenantDomain, APIConstants.API_TENANT_CONF_LOCATION);
if (content != null) {
JSONParser parser = new JSONParser();
apiTenantConfig = (JSONObject) parser.parse(content);
}
} catch (UserStoreException e) {
handleException("UserStoreException thrown when getting API tenant config from registry", e);
} catch (RegistryException e) {
handleException("RegistryException thrown when getting API tenant config from registry", e);
} catch (ParseException e) {
handleException("ParseException thrown when passing API tenant config from registry", e);
}
return getTenantConfigValue(tenantDomain, apiTenantConfig, APIConstants.API_TENANT_CONF_ENABLE_MONITZATION_KEY);
}
private boolean getTenantConfigValue(String tenantDomain, JSONObject apiTenantConfig, String configKey) throws APIManagementException {
if (apiTenantConfig != null) {
Object value = apiTenantConfig.get(configKey);
if (value != null) {
return Boolean.parseBoolean(value.toString());
}
else {
throw new APIManagementException(configKey + " config does not exist for tenant " + tenantDomain);
}
}
return false;
}
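/*
 * Illustrative configuration sketch (assumption, not from the original source): isMonetizationEnabled
 * reads the tenant configuration resource and expects a boolean under the key referenced by
 * APIConstants.API_TENANT_CONF_ENABLE_MONITZATION_KEY, e.g. a tenant-conf.json fragment along the lines of
 *
 *   { "EnableMonetization": true }
 *
 * The literal key name shown here is an assumption; only the constant reference is authoritative.
 */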
/**
* Builds the Solr query fragment that restricts search results to the current user's role list.
*
* @return the store-visibility query fragment built from the user's roles.
* @throws APIManagementException API Management Exception.
*/
private String getUserRoleListQuery() throws APIManagementException {
StringBuilder rolesQuery = new StringBuilder();
rolesQuery.append('(');
rolesQuery.append(APIConstants.NULL_USER_ROLE_LIST);
String[] userRoles = APIUtil.getListOfRoles((userNameWithoutChange != null)? userNameWithoutChange: username);
if (userRoles != null) {
for (String userRole : userRoles) {
rolesQuery.append(" OR ");
rolesQuery.append(ClientUtils.escapeQueryChars(APIUtil.sanitizeUserRole(userRole.toLowerCase())));
}
}
rolesQuery.append(")");
if(log.isDebugEnabled()) {
log.debug("User role list solr query " + APIConstants.STORE_VIEW_ROLES + "=" + rolesQuery.toString());
}
return APIConstants.STORE_VIEW_ROLES + "=" + rolesQuery.toString();
}
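/*
 * Illustrative result (assumption, not from the original source): for a user holding the roles "engineer"
 * and "qa", and assuming APIConstants.STORE_VIEW_ROLES resolves to "store_view_roles" and
 * APIConstants.NULL_USER_ROLE_LIST to "null", the returned fragment would resemble
 *
 *   store_view_roles=(null OR engineer OR qa)
 *
 * which getSearchQuery combines with the caller's Solr query using '&'.
 */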
/**
* To get the current user's role list.
*
* @return user role list.
* @throws APIManagementException API Management Exception.
*/
private List<String> getUserRoleList() throws APIManagementException {
List<String> userRoleList;
if (userNameWithoutChange == null) {
userRoleList = new ArrayList<String>() {{
add(APIConstants.NULL_USER_ROLE_LIST);
}};
} else {
userRoleList = new ArrayList<String>(Arrays.asList(APIUtil.getListOfRoles(userNameWithoutChange)));
}
return userRoleList;
}
@Override
protected String getSearchQuery(String searchQuery) throws APIManagementException {
if (!isAccessControlRestrictionEnabled || ( userNameWithoutChange != null &&
APIUtil.hasPermission(userNameWithoutChange, APIConstants.Permissions
.APIM_ADMIN))) {
return searchQuery;
}
String criteria = getUserRoleListQuery();
if (searchQuery != null && !searchQuery.trim().isEmpty()) {
criteria = criteria + "&" + searchQuery;
}
return criteria;
}
@Override
public String getWSDLDocument(String username, String tenantDomain, String resourceUrl,
Map environmentDetails, Map apiDetails) throws APIManagementException {
if (username == null) {
username = APIConstants.END_USER_ANONYMOUS;
}
if (tenantDomain == null) {
tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
Map<String, Object> docResourceMap = APIUtil.getDocument(username, resourceUrl, tenantDomain);
String wsdlContent = "";
if (log.isDebugEnabled()) {
log.debug("WSDL document resource availability: " + docResourceMap.isEmpty());
}
if (!docResourceMap.isEmpty()) {
try {
ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream();
IOUtils.copy((InputStream) docResourceMap.get("Data"), arrayOutputStream);
String apiName = (String) apiDetails.get(API_NAME);
String apiVersion = (String) apiDetails.get(API_VERSION);
String apiProvider = (String) apiDetails.get(API_PROVIDER);
String environmentName = (String) environmentDetails.get(ENVIRONMENT_NAME);
String environmentType = (String) environmentDetails.get(ENVIRONMENT_TYPE);
if (log.isDebugEnabled()) {
log.debug("Published SOAP api gateway environment name: " + environmentName + " environment type: "
+ environmentType);
}
byte[] updatedWSDLContent = this.getUpdatedWSDLByEnvironment(resourceUrl,
arrayOutputStream.toByteArray(), environmentName, environmentType, apiName, apiVersion, apiProvider);
wsdlContent = new String(updatedWSDLContent);
} catch (IOException e) {
handleException("Error occurred while copying wsdl content into byte array stream for resource: "
+ resourceUrl, e);
}
} else {
handleException("No wsdl resource found for resource path: " + resourceUrl);
}
JSONObject data = new JSONObject();
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE,
docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE));
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME,
docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME));
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_DATA, wsdlContent);
if (log.isDebugEnabled()) {
log.debug("Updated wsdl content details for wsdl resource: " + docResourceMap.get("name") + " is " +
data.toJSONString());
}
return data.toJSONString();
}
/**
* Checks whether the currently logged-in user is authorized to access the given API. If the user is not
* authorized, an exception is thrown.
*
* @param identifier API identifier
* @throws APIManagementException APIManagementException
*/
protected void checkAccessControlPermission(APIIdentifier identifier) throws APIManagementException {
if (identifier == null || !isAccessControlRestrictionEnabled) {
if (!isAccessControlRestrictionEnabled && log.isDebugEnabled() && identifier != null) {
log.debug(
"Publisher access control restriction is not enabled. Hence the API " + identifier.getApiName()
+ " should not be checked for further permission. Registry permission check "
+ "is sufficient");
}
return;
}
String apiPath = APIUtil.getAPIPath(identifier);
Registry registry;
try {
// Need user name with tenant domain to get correct domain name from
// MultitenantUtils.getTenantDomain(username)
String userNameWithTenantDomain = (userNameWithoutChange != null) ? userNameWithoutChange : username;
String apiTenantDomain = getTenantDomain(identifier);
int apiTenantId = getTenantManager().getTenantId(apiTenantDomain);
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(apiTenantDomain)) {
APIUtil.loadTenantRegistry(apiTenantId);
}
if (this.tenantDomain == null || !this.tenantDomain.equals(apiTenantDomain)) { //cross tenant scenario
registry = getRegistryService().getGovernanceUserRegistry(
getTenantAwareUsername(APIUtil.replaceEmailDomainBack(identifier.getProviderName())),
apiTenantId);
} else {
registry = this.registry;
}
Resource apiResource = registry.get(apiPath);
String accessControlProperty = apiResource.getProperty(APIConstants.ACCESS_CONTROL);
if (accessControlProperty == null || accessControlProperty.trim().isEmpty() || accessControlProperty
.equalsIgnoreCase(APIConstants.NO_ACCESS_CONTROL)) {
if (log.isDebugEnabled()) {
log.debug("API in the path " + apiPath + " does not have any access control restriction");
}
return;
}
if (APIUtil.hasPermission(userNameWithTenantDomain, APIConstants.Permissions.APIM_ADMIN)) {
return;
}
String storeVisibilityRoles = apiResource.getProperty(APIConstants.STORE_VIEW_ROLES);
if (storeVisibilityRoles != null && !storeVisibilityRoles.trim().isEmpty()) {
String[] storeVisibilityRoleList = storeVisibilityRoles.split(",");
if (log.isDebugEnabled()) {
log.debug("API has restricted access to users with the roles : " + Arrays
.toString(storeVisibilityRoleList));
}
String[] userRoleList = APIUtil.getListOfRoles(userNameWithTenantDomain);
if (log.isDebugEnabled()) {
log.debug("User " + username + " has roles " + Arrays.toString(userRoleList));
}
for (String role : storeVisibilityRoleList) {
role = role.trim();
if (role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST) || APIUtil
.compareRoleList(userRoleList, role)) {
return;
}
}
if (log.isDebugEnabled()) {
log.debug("API " + identifier + " cannot be accessed by user '" + username + "'. It "
+ "has a store visibility restriction");
}
throw new APIManagementException(
APIConstants.UN_AUTHORIZED_ERROR_MESSAGE + " view the API " + identifier);
}
} catch (RegistryException e) {
throw new APIManagementException(
"Registry Exception while trying to check the store visibility restriction of API " + identifier
.getApiName(), e);
} catch (org.wso2.carbon.user.api.UserStoreException e) {
String msg = "Failed to get API from : " + apiPath;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
}
/**
* Returns the WSDL updated with the endpoints of the gateway environment in which the API is published.
*
* @param wsdlResourcePath registry resource path to the wsdl
* @param wsdlContent wsdl resource content as byte array
* @param environmentName gateway environment name
* @param environmentType gateway environment type
* @param apiName name of the API
* @param apiVersion version of the API
* @param apiProvider provider of the API
* @return updated wsdl content with environment endpoints
* @throws APIManagementException
*/
private byte[] getUpdatedWSDLByEnvironment(String wsdlResourcePath, byte[] wsdlContent, String environmentName,
String environmentType, String apiName, String apiVersion, String apiProvider) throws APIManagementException {
APIMWSDLReader apimwsdlReader = new APIMWSDLReader(wsdlResourcePath);
Definition definition = apimwsdlReader.getWSDLDefinitionFromByteContent(wsdlContent, false);
byte[] updatedWSDLContent = null;
boolean isTenantFlowStarted = false;
try {
String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(apiProvider));
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
int tenantId;
UserRegistry registry;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
APIUtil.loadTenantRegistry(tenantId);
registry = registryService.getGovernanceSystemRegistry(tenantId);
API api = null;
if (!StringUtils.isEmpty(apiName) && !StringUtils.isEmpty(apiVersion)) {
APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(apiProvider), apiName, apiVersion);
if (log.isDebugEnabled()) {
log.debug("Api identifier for the soap api artifact: " + apiIdentifier + "for api name: "
+ apiName + ", version: " + apiVersion);
}
GenericArtifact apiArtifact = APIUtil.getAPIArtifact(apiIdentifier, registry);
api = APIUtil.getAPI(apiArtifact);
if (log.isDebugEnabled()) {
if (api != null) {
log.debug(
"Api context for the artifact with id:" + api.getId() + " is " + api.getContext());
} else {
log.debug("Api does not exist for api name: " + apiIdentifier.getApiName());
}
}
} else {
handleException("Artifact does not exist in the registry for api name: " + apiName +
" and version: " + apiVersion);
}
if (api != null) {
try {
apimwsdlReader.setServiceDefinition(definition, api, environmentName, environmentType);
if (log.isDebugEnabled()) {
log.debug("Soap api with context:" + api.getContext() + " in " + environmentName
+ " with environment type" + environmentType);
}
updatedWSDLContent = apimwsdlReader.getWSDL(definition);
} catch (APIManagementException e) {
handleException("Error occurred while processing the wsdl for api: " + api.getId());
}
} else {
handleException("Error while getting API object for wsdl artifact");
}
} catch (UserStoreException e) {
handleException("Error while reading tenant information", e);
} catch (RegistryException e) {
handleException("Error when create registry instance", e);
}
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return updatedWSDLContent;
}
/**
* Returns the keys of the custom application attributes configured by the user.
*
* @param userId user name of the logged-in user
* @return JSONArray of JSONObjects containing the attribute keys
* @throws APIManagementException
*/
public JSONArray getAppAttributesFromConfig(String userId) throws APIManagementException {
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
int tenantId = 0;
try {
tenantId = getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Error in getting tenantId of " + tenantDomain, e);
}
JSONArray applicationAttributes = null;
JSONObject applicationConfig = APIUtil.getAppAttributeKeysFromRegistry(tenantId);
try {
if (applicationConfig != null) {
applicationAttributes = (JSONArray) applicationConfig.get(APIConstants.ApplicationAttributes.ATTRIBUTES);
} else {
APIManagerConfiguration configuration = getAPIManagerConfiguration();
applicationAttributes = configuration.getApplicationAttributes();
}
} catch (NullPointerException e){
handleException("Error in reading configuration " + e.getMessage(), e);
}
return applicationAttributes;
}
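/*
 * Illustrative sketch (assumption, not taken from the original source): the configuration read
 * above is expected to be a JSON array of attribute descriptors keyed by
 * APIConstants.ApplicationAttributes.ATTRIBUTE (here assumed to be "attribute"), e.g. something like
 *
 *   [{"attribute":"External Reference Id"}, {"attribute":"Billing Tier"}]
 *
 * The attribute names are made up for illustration; checkAppAttributes below only relies on the
 * attribute key of each entry, and other keys may be present in a real configuration.
 */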
/**
* This method is used to validate and synchronize the keys of custom application attributes configured by the user
*
* @param application application whose stored attribute keys are checked against the configuration
* @param userId      user name of the logged in user
* @throws APIManagementException
*/
public void checkAppAttributes(Application application, String userId) throws APIManagementException {
JSONArray applicationAttributesFromConfig = getAppAttributesFromConfig(userId);
Map<String, String> applicationAttributes = application.getApplicationAttributes();
List<String> attributeKeys = new ArrayList<String>();
int applicationId = application.getId();
int tenantId = 0;
Map<String, String> newApplicationAttributes = new HashMap<>();
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
try {
tenantId = getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Error in getting tenantId of " + tenantDomain, e);
}
for (Object object : applicationAttributesFromConfig) {
JSONObject attribute = (JSONObject) object;
attributeKeys.add((String) attribute.get(APIConstants.ApplicationAttributes.ATTRIBUTE));
}
for (Object key : applicationAttributes.keySet()) {
if (!attributeKeys.contains(key)) {
apiMgtDAO.deleteApplicationAttributes((String) key, applicationId);
if (log.isDebugEnabled()) {
log.debug("Removing " + key + "from application - " + application.getName());
}
}
}
for (Object key : attributeKeys) {
if (!applicationAttributes.keySet().contains(key)) {
newApplicationAttributes.put((String) key, "");
}
}
apiMgtDAO.addApplicationAttributes(newApplicationAttributes, applicationId, tenantId);
}
}
| components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIConsumerImpl.java | /*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.impl;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.apimgt.api.APIConsumer;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException;
import org.wso2.carbon.apimgt.api.LoginPostExecutor;
import org.wso2.carbon.apimgt.api.NewPostLoginExecutor;
import org.wso2.carbon.apimgt.api.WorkflowResponse;
import org.wso2.carbon.apimgt.api.model.API;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.apimgt.api.model.APIKey;
import org.wso2.carbon.apimgt.api.model.APIRating;
import org.wso2.carbon.apimgt.api.model.AccessTokenInfo;
import org.wso2.carbon.apimgt.api.model.AccessTokenRequest;
import org.wso2.carbon.apimgt.api.model.Application;
import org.wso2.carbon.apimgt.api.model.ApplicationConstants;
import org.wso2.carbon.apimgt.api.model.ApplicationKeysDTO;
import org.wso2.carbon.apimgt.api.model.Documentation;
import org.wso2.carbon.apimgt.api.model.KeyManager;
import org.wso2.carbon.apimgt.api.model.OAuthAppRequest;
import org.wso2.carbon.apimgt.api.model.OAuthApplicationInfo;
import org.wso2.carbon.apimgt.api.model.Scope;
import org.wso2.carbon.apimgt.api.model.SubscribedAPI;
import org.wso2.carbon.apimgt.api.model.Subscriber;
import org.wso2.carbon.apimgt.api.model.SubscriptionResponse;
import org.wso2.carbon.apimgt.api.model.Tag;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.api.model.TierPermission;
import org.wso2.carbon.apimgt.impl.caching.CacheInvalidator;
import org.wso2.carbon.apimgt.impl.dto.ApplicationRegistrationWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.ApplicationWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.SubscriptionWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.TierPermissionDTO;
import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO;
import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader;
import org.wso2.carbon.apimgt.impl.utils.APINameComparator;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.impl.utils.APIVersionComparator;
import org.wso2.carbon.apimgt.impl.utils.ApplicationUtils;
import org.wso2.carbon.apimgt.impl.workflow.AbstractApplicationRegistrationWorkflowExecutor;
import org.wso2.carbon.apimgt.impl.workflow.GeneralWorkflowResponse;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowConstants;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowException;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutor;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutorFactory;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowStatus;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.generic.GenericArtifactManager;
import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact;
import org.wso2.carbon.governance.api.util.GovernanceUtils;
import org.wso2.carbon.registry.common.TermData;
import org.wso2.carbon.registry.core.ActionConstants;
import org.wso2.carbon.registry.core.Association;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.config.RegistryContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.pagination.PaginationContext;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.user.api.AuthorizationManager;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.mgt.stub.UserAdminStub;
import org.wso2.carbon.user.mgt.stub.UserAdminUserAdminException;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.cache.Caching;
import javax.wsdl.Definition;
/**
* This class provides the core API store functionality. It is implemented in a very
* self-contained and 'pure' manner, without taking requirements like security into account,
* which are subject to frequent change. Due to this 'pure' nature and the significance of
* the class to the overall API management functionality, the visibility of the class has
* been reduced to package level. This means we can still use it for internal purposes and
* possibly even extend it, but it's totally off limits to the users. Users wishing to
* programmatically access this functionality should use one of the extensions of this
* class which is visible to them. These extensions may add additional features like
* security to this class.
*/
public class APIConsumerImpl extends AbstractAPIManager implements APIConsumer {
private static final Log log = LogFactory.getLog(APIConsumerImpl.class);
public static final char COLON_CHAR = ':';
public static final String EMPTY_STRING = "";
public static final String ENVIRONMENT_NAME = "environmentName";
public static final String ENVIRONMENT_TYPE = "environmentType";
public static final String API_NAME = "apiName";
public static final String API_VERSION = "apiVersion";
public static final String API_PROVIDER = "apiProvider";
/* Map to Store APIs against Tag */
private ConcurrentMap<String, Set<API>> taggedAPIs = new ConcurrentHashMap<String, Set<API>>();
private boolean isTenantModeStoreView;
private String requestedTenant;
private boolean isTagCacheEnabled;
private Set<Tag> tagSet;
private long tagCacheValidityTime;
private volatile long lastUpdatedTime;
private volatile long lastUpdatedTimeForTagApi;
private final Object tagCacheMutex = new Object();
private final Object tagWithAPICacheMutex = new Object();
protected APIMRegistryService apimRegistryService;
protected String userNameWithoutChange;
public APIConsumerImpl() throws APIManagementException {
super();
readTagCacheConfigs();
}
public APIConsumerImpl(String username, APIMRegistryService apimRegistryService) throws APIManagementException {
super(username);
userNameWithoutChange = username;
readTagCacheConfigs();
this.apimRegistryService = apimRegistryService;
}
private void readTagCacheConfigs() {
APIManagerConfiguration config = getAPIManagerConfiguration();
String enableTagCache = config.getFirstProperty(APIConstants.STORE_TAG_CACHE_DURATION);
if (enableTagCache == null) {
isTagCacheEnabled = false;
tagCacheValidityTime = 0;
} else {
isTagCacheEnabled = true;
tagCacheValidityTime = Long.parseLong(enableTagCache);
}
}
@Override
public Subscriber getSubscriber(String subscriberId) throws APIManagementException {
Subscriber subscriber = null;
try {
subscriber = apiMgtDAO.getSubscriber(subscriberId);
} catch (APIManagementException e) {
handleException("Failed to get Subscriber", e);
}
return subscriber;
}
/**
* Returns the set of APIs with the given tag from the taggedAPIs Map
*
* @param tagName The name of the tag
* @return Set of {@link API} with the given tag
* @throws APIManagementException
*/
@Override
public Set<API> getAPIsWithTag(String tagName, String requestedTenantDomain) throws APIManagementException {
/* We keep track of the lastUpdatedTime of the TagCache to determine its freshness.
*/
long lastUpdatedTimeAtStart = lastUpdatedTimeForTagApi;
long currentTimeAtStart = System.currentTimeMillis();
if(isTagCacheEnabled && ( (currentTimeAtStart- lastUpdatedTimeAtStart) < tagCacheValidityTime)){
if (taggedAPIs != null && taggedAPIs.containsKey(tagName)) {
return taggedAPIs.get(tagName);
}
}else{
synchronized (tagWithAPICacheMutex) {
lastUpdatedTimeForTagApi = System.currentTimeMillis();
taggedAPIs = new ConcurrentHashMap<String, Set<API>>();
}
}
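/* Worked example of the cache-freshness check above (illustration only): if the configured tag
 * cache validity read into tagCacheValidityTime is 120000 (milliseconds) and the tag map was last
 * rebuilt 90 seconds ago, the cached entry for the tag is served from taggedAPIs above; once 120
 * seconds have elapsed the map is cleared and repopulated from the registry below.
 */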
boolean isTenantMode = requestedTenantDomain != null && !"null".equalsIgnoreCase(requestedTenantDomain);
this.isTenantModeStoreView = isTenantMode;
if (requestedTenantDomain != null && !"null".equals(requestedTenantDomain)) {
this.requestedTenant = requestedTenantDomain;
}
Registry userRegistry;
boolean isTenantFlowStarted = false;
Set<API> apisWithTag = null;
try {
//start the tenant flow prior to loading registry
if (requestedTenant != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(requestedTenant)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(requestedTenantDomain);
}
if ((isTenantMode && this.tenantDomain == null) ||
(isTenantMode && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(requestedTenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
apisWithTag = getAPIsWithTag(userRegistry, tagName);
/* Add the APIs against the tag name */
if (!apisWithTag.isEmpty()) {
if (taggedAPIs.containsKey(tagName)) {
for (API api : apisWithTag) {
taggedAPIs.get(tagName).add(api);
}
} else {
taggedAPIs.putIfAbsent(tagName, apisWithTag);
}
}
} catch (RegistryException e) {
handleException("Failed to get api by the tag", e);
} catch (UserStoreException e) {
handleException("Failed to get api by the tag", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
return apisWithTag;
}
protected void setUsernameToThreadLocalCarbonContext(String username) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(username);
}
protected UserRegistry getGovernanceUserRegistry(int tenantId) throws RegistryException {
return ServiceReferenceHolder.getInstance().getRegistryService().
getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
}
protected int getTenantId(String requestedTenantDomain) throws UserStoreException {
return ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain);
}
/**
* Returns a paginated set of APIs with the given tag, together with the total count of matching APIs.
*
* @param tag The name of the tag
* @param start The starting index of the result set
* @param end The maximum number of APIs to return, counted from {@code start}
* @param tenantDomain Tenant domain of the Store
* @return A {@link Map} containing the APIs of the requested page and the total number of all the available
* APIs
* @throws APIManagementException
*/
@Override
public Map<String, Object> getPaginatedAPIsWithTag(String tag, int start, int end, String tenantDomain) throws APIManagementException {
List<API> apiList = new ArrayList<API>();
Set<API> resultSet = new TreeSet<API>(new APIVersionComparator());
Map<String, Object> results = new HashMap<String, Object>();
Set<API> taggedAPISet = this.getAPIsWithTag(tag,tenantDomain);
if (taggedAPISet != null) {
if (taggedAPISet.size() < end) {
end = taggedAPISet.size();
}
int totalLength;
apiList.addAll(taggedAPISet);
totalLength = apiList.size();
if (totalLength <= ((start + end) - 1)) {
end = totalLength;
} else {
end = start + end;
}
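// Worked example of the index arithmetic above (illustration only): with start=10, end=10 and 25
// tagged APIs, totalLength=25 is greater than (10+10-1), so end becomes start+end=20 and the loop
// below copies items 10..19 (the second page). With start=20, end=10 and the same 25 APIs,
// totalLength=25 <= (20+10-1), so end is clamped to 25 and only items 20..24 are returned.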
for (int i = start; i < end; i++) {
resultSet.add(apiList.get(i));
}
results.put("apis", resultSet);
results.put("length", taggedAPISet.size());
} else {
results.put("apis", null);
results.put("length", 0);
}
return results;
}
/**
* Returns the set of APIs with the given tag, retrieved from registry
*
* @param registry - Current registry; tenant/SuperTenant
* @param tag - The tag name
* @return A {@link Set} of {@link API} objects.
* @throws APIManagementException
*/
private Set<API> getAPIsWithTag(Registry registry, String tag)
throws APIManagementException {
Set<API> apiSet = new TreeSet<API>(new APINameComparator());
try {
List<GovernanceArtifact> genericArtifacts =
GovernanceUtils.findGovernanceArtifacts(getSearchQuery(APIConstants.TAG_SEARCH_TYPE_PREFIX2 + tag), registry,
APIConstants.API_RXT_MEDIA_TYPE);
for (GovernanceArtifact genericArtifact : genericArtifacts) {
try {
String apiStatus = APIUtil.getLcStateFromArtifact(genericArtifact);
if (genericArtifact != null && (APIConstants.PUBLISHED.equals(apiStatus)
|| APIConstants.PROTOTYPED.equals(apiStatus))) {
API api = APIUtil.getAPI(genericArtifact);
if (api != null) {
apiSet.add(api);
}
}
} catch (RegistryException e) {
log.warn("User is not authorized to get an API with tag " + tag, e);
}
}
} catch (RegistryException e) {
handleException("Failed to get API for tag " + tag, e);
}
return apiSet;
}
/**
* The method to get APIs for the Store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Override
public Set<API> getAllPublishedAPIs(String tenantDomain) throws APIManagementException {
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
userRegistry = getGovernanceUserRegistry(tenantId);
} else {
userRegistry = registry;
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
if (genericArtifacts == null || genericArtifacts.length == 0) {
return apiSortedSet;
}
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
for (GenericArtifact artifact : genericArtifacts) {
// adding the API provider can mark the latest API .
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
try {
checkAccessControlPermission(api.getId());
} catch (APIManagementException e) {
// This is a second level of filter to get apis based on access control and visibility.
// Hence log is set as debug and continued.
if(log.isDebugEnabled()) {
log.debug("User is not authorized to view the api " + api.getId().getApiName(), e);
}
continue;
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
return apiSortedSet;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
return apiVersionsSortedSet;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving APIs for store. User : " + PrivilegedCarbonContext
.getThreadLocalCarbonContext().getUsername();
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
}
return apiSortedSet;
}
/**
* The method to get paginated published APIs for the Store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Override
@Deprecated
public Map<String,Object> getAllPaginatedPublishedAPIs(String tenantDomain,int start,int end)
throws APIManagementException {
Boolean displayAPIsWithMultipleStatus = false;
try {
if (tenantDomain != null) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
}finally {
endTenantFlow();
}
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
//Create the search attribute map
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
} else{
return getAllPaginatedAPIs(tenantDomain, start, end);
}
Map<String, Object> result = new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength = 0;
try {
Registry userRegistry;
boolean isTenantMode = (tenantDomain != null);
if ((isTenantMode && this.tenantDomain == null) ||
(isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting paginated published API.");
continue;
}
// adding the API provider can mark the latest API .
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis", apiVersionsSortedSet);
result.put("totalLength", totalLength);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all Published APIs.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
/**
* Regenerate consumer secret.
*
* @param clientId For which consumer key we need to regenerate consumer secret.
* @return New consumer secret.
* @throws APIManagementException This is the custom exception class for API management.
*/
public String renewConsumerSecret(String clientId) throws APIManagementException {
// Create Token Request with parameters provided from UI.
AccessTokenRequest tokenRequest = new AccessTokenRequest();
tokenRequest.setClientId(clientId);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
return keyManager.getNewApplicationConsumerSecret(tokenRequest);
}
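/*
 * Illustrative usage sketch (not part of the original source): a Store action that needs to reset
 * an application's consumer secret could call this method with the application's consumer key, e.g.
 *
 *   String newSecret = renewConsumerSecret("vYDoc9s7IgAFdkSyNDaswBX7ejoa");
 *
 * The consumer key shown is a made-up example; the configured KeyManager implementation performs
 * the actual regeneration.
 */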
/**
* The method to get APIs in any of the given lifecycle (LC) states
*
* @return Map<String, Object> API result set with pagination information
* @throws APIManagementException
*/
@Override
public Map<String, Object> getAllPaginatedAPIsByStatus(String tenantDomain,
int start, int end, final String[] apiStatus, boolean returnAPITags) throws APIManagementException {
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
boolean isMore = false;
String criteria = APIConstants.LCSTATE_SEARCH_TYPE_KEY;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
String paginationLimit = getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
// Because the store jaggery pagination logic is 10 results per a page we need to set pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
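// Worked example (illustration only): with start=0 and a configured per-page value of 30,
// maxPaginationLimit = 0 + 30 + 1 = 31. If the search below then reports exactly 31 matches,
// isMore is set to true and totalLength is decremented back to 30; any smaller count is treated
// as the exact total.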
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
criteria = criteria + APIUtil.getORBasedSearchCriteria(apiStatus);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
if (apiStatus != null && apiStatus.length > 0) {
List<GovernanceArtifact> genericArtifacts = GovernanceUtils.findGovernanceArtifacts
(getSearchQuery(criteria), userRegistry, APIConstants.API_RXT_MEDIA_TYPE);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.size() == 0) {
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist so we cannot determine the total API count without incurring a
// performance hit
--totalLength; // Remove the additional 1 we added earlier when setting max pagination limit
}
int tempLength = 0;
for (GovernanceArtifact artifact : genericArtifacts) {
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//log and continue since we want to load the rest of the APIs.
log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME),
e);
}
if (api != null) {
if (returnAPITags) {
String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
Set<String> tags = new HashSet<String>();
org.wso2.carbon.registry.core.Tag[] tag = registry.getTags(artifactPath);
for (org.wso2.carbon.registry.core.Tag tag1 : tag) {
tags.add(tag1.getTagName());
}
api.addTags(tags);
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
tempLength++;
if (tempLength >= totalLength) {
break;
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis", apiVersionsSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all paginated APIs by status.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
/**
* The method to get APIs with the given status for the Store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Override
@Deprecated
public Map<String, Object> getAllPaginatedAPIsByStatus(String tenantDomain,
int start, int end, final String apiStatus, boolean returnAPITags) throws APIManagementException {
try {
if (tenantDomain != null) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
}finally {
endTenantFlow();
}
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (APIConstants.PROTOTYPED.equals(apiStatus)) {
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(apiStatus);
}});
} else {
if (!displayAPIsWithMultipleStatus) {
//Create the search attribute map
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(apiStatus);
}});
} else {
return getAllPaginatedAPIs(tenantDomain, start, end);
}
}
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
boolean isMore = false;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
String paginationLimit = getAPIManagerConfiguration()
.getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
// Because the store jaggery pagination logic is 10 results per a page we need to set pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength=PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist so we cannot determine the total API count without incurring a
// performance hit
--totalLength; // Remove the additional 1 we added earlier when setting max pagination limit
}
int tempLength=0;
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting all paginated APIs by status.");
continue;
}
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//log and continue since we want to load the rest of the APIs.
log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME),
e);
}
if (api != null) {
if (returnAPITags) {
String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
Set<String> tags = new HashSet<String>();
org.wso2.carbon.registry.core.Tag[] tag = registry.getTags(artifactPath);
for (org.wso2.carbon.registry.core.Tag tag1 : tag) {
tags.add(tag1.getTagName());
}
api.addTags(tags);
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
tempLength++;
if (tempLength >= totalLength){
break;
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis",apiVersionsSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving APIs by status.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
/**
* Re-generates the access token.
* @param oldAccessToken Token to be revoked
* @param clientId Consumer Key for the Application
* @param clientSecret Consumer Secret for the Application
* @param validityTime Desired Validity time for the token
* @param jsonInput Additional parameters if Authorization server needs any.
* @return Renewed Access Token.
* @throws APIManagementException
*/
@Override
public AccessTokenInfo renewAccessToken(String oldAccessToken, String clientId, String clientSecret,
String validityTime, String[] requestedScopes, String jsonInput) throws APIManagementException {
// Create Token Request with parameters provided from UI.
AccessTokenRequest tokenRequest = new AccessTokenRequest();
tokenRequest.setClientId(clientId);
tokenRequest.setClientSecret(clientSecret);
tokenRequest.setValidityPeriod(Long.parseLong(validityTime));
tokenRequest.setTokenToRevoke(oldAccessToken);
tokenRequest.setScope(requestedScopes);
try {
// Populating additional parameters.
tokenRequest = ApplicationUtils.populateTokenRequest(jsonInput, tokenRequest);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
JSONObject appLogObject = new JSONObject();
appLogObject.put("Re-Generated Keys for application with client Id", clientId);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyManager.getNewApplicationAccessToken(tokenRequest);
} catch (APIManagementException e) {
log.error("Error while re-generating AccessToken", e);
throw e;
}
}
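/*
 * Illustrative usage sketch (not part of the original source; all values are hypothetical): a
 * caller regenerating a token for an application might invoke
 *
 *   AccessTokenInfo info = renewAccessToken(oldToken, consumerKey, consumerSecret, "3600",
 *           new String[] { "default" }, "{}");
 *
 * where "3600" is the requested validity period and "{}" stands for an empty set of additional
 * key manager parameters; the exact JSON expected depends on the KeyManager in use.
 */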
/**
* The method to get all PUBLISHED and DEPRECATED APIs for the Store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Deprecated
public Map<String,Object> getAllPaginatedAPIs(String tenantDomain,int start,int end) throws APIManagementException {
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
boolean noPublishedAPIs = false;
if (artifactManager != null) {
//Create the search attribute map for PUBLISHED APIs
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
noPublishedAPIs = true;
}
int publishedAPICount;
if (genericArtifacts != null) {
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting all paginated APIs.");
continue;
}
// adding the API provider can mark the latest API .
// String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
// key = api.getId().getProviderName() + ":" + api.getId().getApiName() + ":" + api.getId()
// .getVersion();
multiVersionedAPIs.add(api);
}
}
}
}
if (!displayMultipleVersions) {
publishedAPICount = latestPublishedAPIs.size();
} else {
publishedAPICount = multiVersionedAPIs.size();
}
if ((start + end) > publishedAPICount) {
if (publishedAPICount > 0) {
/*Starting to retrieve DEPRECATED APIs*/
start = 0;
/* publishedAPICount is always less than end*/
end = end - publishedAPICount;
} else {
start = start - totalLength;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
//Create the search attribute map for DEPRECATED APIs
Map<String, List<String>> listMapForDeprecatedAPIs = new HashMap<String, List<String>>();
listMapForDeprecatedAPIs.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.DEPRECATED);
}});
GenericArtifact[] genericArtifactsForDeprecatedAPIs = artifactManager.findGenericArtifacts(listMapForDeprecatedAPIs);
totalLength = totalLength + PaginationContext.getInstance().getLength();
if ((genericArtifactsForDeprecatedAPIs == null || genericArtifactsForDeprecatedAPIs.length == 0) && noPublishedAPIs) {
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
return result;
}
if (genericArtifactsForDeprecatedAPIs != null) {
for (GenericArtifact artifact : genericArtifactsForDeprecatedAPIs) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting deprecated APIs.");
continue;
}
// adding the API provider can mark the latest API .
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
}
}
if (!displayMultipleVersions) {
for (API api : latestPublishedAPIs.values()) {
apiSortedSet.add(api);
}
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis",apiVersionsSortedSet);
result.put("totalLength",totalLength);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all paginated APIs.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
}finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
@Override
public Set<API> getTopRatedAPIs(int limit) throws APIManagementException {
int returnLimit = 0;
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
try {
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage = "Artifact manager is null when retrieving top rated APIs.";
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
if (genericArtifacts == null || genericArtifacts.length == 0) {
return apiSortedSet;
}
for (GenericArtifact genericArtifact : genericArtifacts) {
String status = APIUtil.getLcStateFromArtifact(genericArtifact);
if (APIConstants.PUBLISHED.equals(status)) {
String artifactPath = genericArtifact.getPath();
float rating = registry.getAverageRating(artifactPath);
if (rating > APIConstants.TOP_TATE_MARGIN && (returnLimit < limit)) {
returnLimit++;
API api = APIUtil.getAPI(genericArtifact, registry);
if (api != null) {
apiSortedSet.add(api);
}
}
}
}
} catch (RegistryException e) {
handleException("Failed to get top rated API", e);
}
return apiSortedSet;
}
/**
* Get the recently added APIs set
*
* @param limit        the maximum number of recently added APIs to return
* @param tenantDomain tenant domain whose recently added APIs are requested
* @return Set<API>
* @throws APIManagementException
*/
@Override
public Set<API> getRecentlyAddedAPIs(int limit, String tenantDomain)
throws APIManagementException {
SortedSet<API> recentlyAddedAPIs = new TreeSet<API>(new APINameComparator());
SortedSet<API> recentlyAddedAPIsWithMultipleVersions = new TreeSet<API>(new APIVersionComparator());
Registry userRegistry;
APIManagerConfiguration config = getAPIManagerConfiguration();
boolean isRecentlyAddedAPICacheEnabled =
Boolean.parseBoolean(config.getFirstProperty(APIConstants.API_STORE_RECENTLY_ADDED_API_CACHE_ENABLE));
PrivilegedCarbonContext.startTenantFlow();
boolean isTenantFlowStarted ;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
isTenantFlowStarted = true;
} else {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
isTenantFlowStarted = true;
}
try {
boolean isTenantMode = (tenantDomain != null);
if ((isTenantMode && this.tenantDomain == null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant based store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
isTenantFlowStarted = true;
userRegistry = getGovernanceUserRegistry(tenantId);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
isTenantFlowStarted = true;
}
if (isRecentlyAddedAPICacheEnabled) {
boolean isStatusChanged = false;
Set<API> recentlyAddedAPI = (Set<API>) Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME).get(username + COLON_CHAR + tenantDomain);
if (recentlyAddedAPI != null) {
for (API api : recentlyAddedAPI) {
try {
if (!APIConstants.PUBLISHED.equalsIgnoreCase(userRegistry.get(APIUtil.getAPIPath(api.getId())).getProperty(APIConstants.API_STATUS))) {
isStatusChanged = true;
break;
}
} catch (Exception ex) {
log.error("Error while checking API status for APP " + api.getId().getApiName() + '-' +
api.getId().getVersion(), ex);
}
}
if (!isStatusChanged) {
return recentlyAddedAPI;
}
}
}
PaginationContext.init(0, limit, APIConstants.REGISTRY_ARTIFACT_SEARCH_DESC_ORDER,
APIConstants.CREATED_DATE, Integer.MAX_VALUE);
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
listMap.put(APIConstants.STORE_VIEW_ROLES, getUserRoleList());
String searchCriteria = APIConstants.LCSTATE_SEARCH_KEY + "= (" + APIConstants.PUBLISHED + ")";
//Find UUID
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGovernanceArtifacts(getSearchQuery(searchCriteria));
SortedSet<API> allAPIs = new TreeSet<API>(new APINameComparator());
for (GenericArtifact artifact : genericArtifacts) {
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//just log and continue since we want to go through the other APIs as well.
log.error("Error loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME), e);
}
if (api != null) {
allAPIs.add(api);
}
}
if (!APIUtil.isAllowDisplayMultipleVersions()) {
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
Comparator<API> versionComparator = new APIVersionComparator();
String key;
for (API api : allAPIs) {
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same
// name, make sure this one has a higher version
// number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
}
recentlyAddedAPIs.addAll(latestPublishedAPIs.values());
if (isRecentlyAddedAPICacheEnabled) {
Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME)
.put(username + COLON_CHAR + tenantDomain, allAPIs);
}
return recentlyAddedAPIs;
} else {
recentlyAddedAPIsWithMultipleVersions.addAll(allAPIs);
if (isRecentlyAddedAPICacheEnabled) {
Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME)
.put(username + COLON_CHAR + tenantDomain, allAPIs);
}
return recentlyAddedAPIsWithMultipleVersions;
}
} else {
String errorMessage = "Artifact manager is null when retrieving recently added APIs for tenant domain "
+ tenantDomain;
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
if (isTenantFlowStarted) {
endTenantFlow();
}
}
return recentlyAddedAPIs;
}
@Override
public Set<Tag> getAllTags(String requestedTenantDomain) throws APIManagementException {
this.isTenantModeStoreView = (requestedTenantDomain != null);
if(requestedTenantDomain != null){
this.requestedTenant = requestedTenantDomain;
}
/* We keep track of the lastUpdatedTime of the TagCache to determine its freshness.
*/
long lastUpdatedTimeAtStart = lastUpdatedTime;
long currentTimeAtStart = System.currentTimeMillis();
if(isTagCacheEnabled && ( (currentTimeAtStart- lastUpdatedTimeAtStart) < tagCacheValidityTime)){
if(tagSet != null){
return tagSet;
}
}
TreeSet<Tag> tempTagSet = new TreeSet<Tag>(new Comparator<Tag>() {
@Override
public int compare(Tag o1, Tag o2) {
return o1.getName().compareTo(o2.getName());
}
});
Registry userRegistry = null;
boolean isTenantFlowStarted = false;
String tagsQueryPath = null;
try {
tagsQueryPath = RegistryConstants.QUERIES_COLLECTION_PATH + "/tag-summary";
Map<String, String> params = new HashMap<String, String>();
params.put(RegistryConstants.RESULT_TYPE_PROPERTY_NAME, RegistryConstants.TAG_SUMMARY_RESULT_TYPE);
//as a tenant, I'm browsing my own Store or I'm browsing a Store of another tenant..
if ((this.isTenantModeStoreView && this.tenantDomain==null) || (this.isTenantModeStoreView && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant based store anonymous mode
int tenantId = getTenantId(this.requestedTenant);
userRegistry = ServiceReferenceHolder.getInstance().getRegistryService().
getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
} else {
userRegistry = registry;
}
Map<String, Tag> tagsData = new HashMap<String, Tag>();
try {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(((UserRegistry)userRegistry).getUserName());
if (requestedTenant != null ) {
isTenantFlowStarted = startTenantFlowForTenantDomain(requestedTenant);
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(((UserRegistry)userRegistry).getUserName());
}
Map <String, List<String>> criteriaPublished = new HashMap<String, List<String>>();
criteriaPublished.put(APIConstants.LCSTATE_SEARCH_KEY, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
//rxt api media type
List<TermData> termsPublished = GovernanceUtils
.getTermDataList(criteriaPublished, APIConstants.API_OVERVIEW_TAG,
APIConstants.API_RXT_MEDIA_TYPE, true);
if(termsPublished != null){
for(TermData data : termsPublished){
tempTagSet.add(new Tag(data.getTerm(), (int)data.getFrequency()));
}
}
Map<String, List<String>> criteriaPrototyped = new HashMap<String, List<String>>();
criteriaPrototyped.put(APIConstants.LCSTATE_SEARCH_KEY, new ArrayList<String>() {{
add(APIConstants.PROTOTYPED);
}});
//rxt api media type
List<TermData> termsPrototyped = GovernanceUtils
.getTermDataList(criteriaPrototyped, APIConstants.API_OVERVIEW_TAG,
APIConstants.API_RXT_MEDIA_TYPE, true);
if(termsPrototyped != null){
for(TermData data : termsPrototyped){
tempTagSet.add(new Tag(data.getTerm(), (int)data.getFrequency()));
}
}
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
synchronized (tagCacheMutex) {
lastUpdatedTime = System.currentTimeMillis();
this.tagSet = tempTagSet;
}
} catch (RegistryException e) {
try {
//Before a tenant logs in to the store or publisher at least once,
//a registry exception is thrown when the tenant store is accessed in anonymous mode.
//This fix checks whether the query resource is available in the registry. If not,
//it logs a warning.
if (userRegistry != null && !userRegistry.resourceExists(tagsQueryPath)) {
log.warn("Failed to retrieve tags query resource at " + tagsQueryPath);
return tagSet == null ? Collections.EMPTY_SET : tagSet;
}
} catch (RegistryException e1) {
// Even if we should ignore this exception, we are logging this as a warn log.
// The reason is that, this error happens when we try to add some additional logs in an error
// scenario and it does not affect the execution path.
log.warn("Unable to execute the resource exist method for tags query resource path : " + tagsQueryPath,
e1);
}
handleException("Failed to get all the tags", e);
} catch (UserStoreException e) {
handleException("Failed to get all the tags", e);
}
return tagSet;
}
@Override
public Set<Tag> getTagsWithAttributes(String tenantDomain) throws APIManagementException {
// Fetch all the tags first.
Set<Tag> tags = getAllTags(tenantDomain);
// For each and every tag get additional attributes from the registry.
String descriptionPathPattern = APIConstants.TAGS_INFO_ROOT_LOCATION + "/%s/description.txt";
String thumbnailPathPattern = APIConstants.TAGS_INFO_ROOT_LOCATION + "/%s/thumbnail.png";
//if the tenantDomain is not specified super tenant domain is used
if (StringUtils.isBlank(tenantDomain)) {
try {
tenantDomain = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getSuperTenantDomain();
} catch (org.wso2.carbon.user.core.UserStoreException e) {
handleException("Cannot get super tenant domain name", e);
}
}
//get the registry instance related to the tenant domain
UserRegistry govRegistry = null;
try {
int tenantId = getTenantId(tenantDomain);
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
govRegistry = registryService.getGovernanceSystemRegistry(tenantId);
} catch (UserStoreException e) {
handleException("Cannot get tenant id for tenant domain name:" + tenantDomain, e);
} catch (RegistryException e) {
handleException("Cannot get registry for tenant domain name:" + tenantDomain, e);
}
if (govRegistry != null) {
for (Tag tag : tags) {
// Get the description.
Resource descriptionResource = null;
String descriptionPath = String.format(descriptionPathPattern, tag.getName());
try {
if (govRegistry.resourceExists(descriptionPath)) {
descriptionResource = govRegistry.get(descriptionPath);
}
} catch (RegistryException e) {
//warn and proceed to the next tag
log.warn(String.format("Error while querying the existence of the description for the tag '%s'",
tag.getName()), e);
}
// The resource content is assumed to be a byte array since it is the content
// of a text file.
if (descriptionResource != null) {
try {
String description = new String((byte[]) descriptionResource.getContent(),
Charset.defaultCharset());
tag.setDescription(description);
} catch (ClassCastException e) {
//added warnings as it can then proceed to load rest of resources/tags
log.warn(String.format("Cannot cast content of %s to byte[]", descriptionPath), e);
} catch (RegistryException e) {
//added warnings as it can then proceed to load rest of resources/tags
log.warn(String.format("Cannot read content of %s", descriptionPath), e);
}
}
// Checks whether the thumbnail exists.
String thumbnailPath = String.format(thumbnailPathPattern, tag.getName());
try {
boolean isThumbnailExists = govRegistry.resourceExists(thumbnailPath);
tag.setThumbnailExists(isThumbnailExists);
if (isThumbnailExists) {
tag.setThumbnailUrl(APIUtil.getRegistryResourcePathForUI(
APIConstants.RegistryResourceTypesForUI.TAG_THUMBNAIL, tenantDomain, thumbnailPath));
}
} catch (RegistryException e) {
//warn and then proceed to load rest of tags
log.warn(String.format("Error while querying the existence of %s", thumbnailPath), e);
}
}
}
return tags;
}
@Override
public void rateAPI(APIIdentifier apiId, APIRating rating,
String user) throws APIManagementException {
apiMgtDAO.addRating(apiId, rating.getRating(), user);
}
@Override
public void removeAPIRating(APIIdentifier apiId, String user) throws APIManagementException {
apiMgtDAO.removeAPIRating(apiId, user);
}
@Override
public int getUserRating(APIIdentifier apiId, String user) throws APIManagementException {
return apiMgtDAO.getUserRating(apiId, user);
}
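/**
 * Returns the published (and, if configured, deprecated) APIs of the given provider, up to the given limit.
 * Depending on the store configuration, either only the latest version of each API or all versions are returned.
 *
 * @param providerId provider id (user name) of the API provider
 * @param limit maximum number of APIs to return; -1 to return all
 * @return a sorted set of the matching APIs
 * @throws APIManagementException if the registry lookup fails
 */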
@Override
public Set<API> getPublishedAPIsByProvider(String providerId, int limit)
throws APIManagementException {
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
try {
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage =
"Artifact manager is null when retrieving published APIs by provider ID " + providerId;
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
if (associations.length < limit || limit == -1) {
limit = associations.length;
}
for (int i = 0; i < limit; i++) {
Association association = associations[i];
String apiPath = association.getDestinationPath();
Resource resource = registry.get(apiPath);
String apiArtifactId = resource.getUUID();
if (apiArtifactId != null) {
GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
// check the API status
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
} else {
throw new GovernanceException("artifact id is null of " + apiPath);
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
return apiSortedSet;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
return apiVersionsSortedSet;
}
} catch (RegistryException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
}
return null;
}
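/**
 * Returns the published APIs of the given provider that are visible to the logged in user, optionally
 * filtered by API owner and business owner, up to the given limit.
 *
 * @param providerId provider id (user name) of the API provider
 * @param loggedUsername user name of the logged in user; an empty string denotes an anonymous user
 * @param limit maximum number of APIs to return
 * @param apiOwner API owner value coming from the front end, or null/empty to skip owner filtering
 * @param apiBizOwner business owner to search by, or null/empty to search by provider associations instead
 * @return the set of matching APIs
 * @throws APIManagementException if an error occurs while retrieving the APIs
 */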
@Override
public Set<API> getPublishedAPIsByProvider(String providerId, String loggedUsername, int limit, String apiOwner,
String apiBizOwner) throws APIManagementException {
try {
Boolean allowMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean showAllAPIs = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
String providerDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerId));
int tenantId = getTenantId(providerDomain);
final Registry registry = ServiceReferenceHolder.getInstance().
getRegistryService().getGovernanceSystemRegistry(tenantId);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage =
"Artifact manager is null when retrieving all published APIs by provider ID " + providerId;
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
int publishedAPICount = 0;
Map<String, API> apiCollection = new HashMap<String, API>();
if(apiBizOwner != null && !apiBizOwner.isEmpty()){
try {
final String bizOwner = apiBizOwner;
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_BUSS_OWNER, new ArrayList<String>() {{
add(bizOwner);
}});
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username);
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
if(genericArtifacts != null && genericArtifacts.length > 0){
for(GenericArtifact artifact : genericArtifacts){
if (publishedAPICount >= limit) {
break;
}
if(isCandidateAPI(artifact.getPath(), loggedUsername, artifactManager, tenantId, showAllAPIs,
allowMultipleVersions, apiOwner, providerId, registry, apiCollection)){
publishedAPICount += 1;
}
}
}
} catch (GovernanceException e) {
log.error("Error while finding APIs by business owner " + apiBizOwner, e);
return null;
}
}
else{
String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
for (Association association : associations) {
if (publishedAPICount >= limit) {
break;
}
String apiPath = association.getDestinationPath();
if(isCandidateAPI(apiPath, loggedUsername, artifactManager, tenantId, showAllAPIs,
allowMultipleVersions, apiOwner, providerId, registry, apiCollection)){
publishedAPICount += 1;
}
}
}
return new HashSet<API>(apiCollection.values());
} catch (RegistryException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
} catch (org.wso2.carbon.user.core.UserStoreException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
} catch (UserStoreException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
}
}
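/**
 * Checks whether the API at the given registry path should be included in the result set for the given user,
 * taking into account authorization, lifecycle status, visibility, API owner and version display settings.
 * If the API qualifies, it is added to the given apiCollection map.
 *
 * @return true if the API was added to apiCollection, false otherwise
 */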
private boolean isCandidateAPI(String apiPath, String loggedUsername, GenericArtifactManager artifactManager,
int tenantId, boolean showAllAPIs, boolean allowMultipleVersions,
String apiOwner, String providerId, Registry registry, Map<String, API> apiCollection)
throws UserStoreException, RegistryException, APIManagementException {
AuthorizationManager manager = ServiceReferenceHolder.getInstance().getRealmService().
getTenantUserRealm(tenantId).getAuthorizationManager();
Comparator<API> versionComparator = new APIVersionComparator();
Resource resource;
String path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) +
apiPath);
boolean checkAuthorized;
String userNameWithoutDomain = loggedUsername;
if (!loggedUsername.isEmpty() && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(super.tenantDomain)) {
String[] nameParts = loggedUsername.split("@");
userNameWithoutDomain = nameParts[0];
}
int loggedInUserTenantDomain = -1;
if(!StringUtils.isEmpty(loggedUsername)) {
loggedInUserTenantDomain = APIUtil.getTenantId(loggedUsername);
}
if (loggedUsername.isEmpty()) {
// Anonymous user is viewing.
checkAuthorized = manager.isRoleAuthorized(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET);
} else if (tenantId != loggedInUserTenantDomain) {
//Cross tenant scenario
providerId = APIUtil.replaceEmailDomainBack(providerId);
String[] nameParts = providerId.split("@");
String provideNameWithoutDomain = nameParts[0];
checkAuthorized = manager.isUserAuthorized(provideNameWithoutDomain, path, ActionConstants.GET);
} else {
// Some user is logged in also user and api provider tenant domain are same.
checkAuthorized = manager.isUserAuthorized(userNameWithoutDomain, path, ActionConstants.GET);
}
String apiArtifactId = null;
if (checkAuthorized) {
resource = registry.get(apiPath);
apiArtifactId = resource.getUUID();
}
if (apiArtifactId != null) {
GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
// check the API status
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!showAllAPIs) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
String apiVisibility = api.getVisibility();
if(!StringUtils.isEmpty(apiVisibility) && !APIConstants.API_GLOBAL_VISIBILITY.equalsIgnoreCase(apiVisibility)) {
String providerDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerId));
String loginUserDomain = MultitenantUtils.getTenantDomain(loggedUsername);
if(!StringUtils.isEmpty(providerDomain) && !StringUtils.isEmpty(loginUserDomain)
&& !providerDomain.equals(loginUserDomain)){
return false;
}
}
// apiOwner is the value coming from front end and compared against the API instance
if (apiOwner != null && !apiOwner.isEmpty()) {
if (APIUtil.replaceEmailDomainBack(providerId).equals(APIUtil.replaceEmailDomainBack(apiOwner)) &&
api.getApiOwner() != null && !api.getApiOwner().isEmpty() &&
!APIUtil.replaceEmailDomainBack(apiOwner)
.equals(APIUtil.replaceEmailDomainBack(api.getApiOwner()))) {
return false; // reject remote APIs when local admin user's API selected
} else if (!APIUtil.replaceEmailDomainBack(providerId).equals(APIUtil.replaceEmailDomainBack(apiOwner)) &&
!APIUtil.replaceEmailDomainBack(apiOwner)
.equals(APIUtil.replaceEmailDomainBack(api.getApiOwner()))) {
return false; // reject local admin's APIs when remote API selected
}
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!allowMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = apiCollection.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
apiCollection.put(key, api);
return true;
}
} else {
// We haven't seen this API before
apiCollection.put(key, api);
return true;
}
} else { //If allow showing multiple versions of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName() + COLON_CHAR + api.getId()
.getVersion();
//we're not really interested in the key, so generate one for the sake of adding this element to
//the map.
key = key + '_' + apiCollection.size();
apiCollection.put(key, api);
return true;
}
}
}
return false;
}
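/**
 * Performs a paginated API search for the given search term and type within the requested tenant domain.
 * Document and sub-context searches are delegated to the corresponding utility methods; all other search
 * types use the Solr-indexed registry search.
 *
 * @param searchTerm search term entered by the user
 * @param searchType type of the search (e.g. name, provider, doc, subcontext)
 * @param requestedTenantDomain tenant domain to search in; null means the super tenant domain
 * @param start start index of the result page
 * @param end end index of the result page
 * @param isLazyLoad whether to load APIs with a limited set of attributes
 * @return a map containing the matching APIs and pagination information
 * @throws APIManagementException if the search fails
 */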
@Override
public Map<String,Object> searchPaginatedAPIs(String searchTerm, String searchType, String requestedTenantDomain,int start,int end, boolean isLazyLoad)
throws APIManagementException {
Map<String,Object> result = new HashMap<String,Object>();
boolean isTenantFlowStarted = false;
try {
boolean isTenantMode=(requestedTenantDomain != null);
if (isTenantMode && !org.wso2.carbon.base.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(requestedTenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(requestedTenantDomain, true);
} else {
requestedTenantDomain = org.wso2.carbon.base.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(requestedTenantDomain, true);
}
Registry userRegistry;
int tenantIDLocal = 0;
String userNameLocal = this.username;
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant store anonymous mode
tenantIDLocal = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain);
userRegistry = ServiceReferenceHolder.getInstance().
getRegistryService().getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantIDLocal);
userNameLocal = CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME;
} else {
userRegistry = this.registry;
tenantIDLocal = tenantId;
}
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(userNameLocal);
if (APIConstants.DOCUMENTATION_SEARCH_TYPE_PREFIX.equalsIgnoreCase(searchType)) {
Map<Documentation, API> apiDocMap =
APIUtil.searchAPIsByDoc(userRegistry, tenantIDLocal, userNameLocal, searchTerm,
APIConstants.STORE_CLIENT);
result.put("apis", apiDocMap);
/*Pagination for Document search results is not supported yet, hence length is sent as end-start*/
if (apiDocMap.isEmpty()) {
result.put("length", 0);
} else {
result.put("length", end-start);
}
}
else if ("subcontext".equalsIgnoreCase(searchType)) {
result = APIUtil.searchAPIsByURLPattern(userRegistry, searchTerm, start, end);
} else {
result = searchPaginatedAPIs(userRegistry, searchTerm, searchType, start, end, isLazyLoad);
}
} catch (Exception e) {
handleException("Failed to Search APIs", e);
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return result;
}
/**
 * Paginated API search based on Solr indexing.
 *
 * @param registry registry instance to search against
 * @param searchTerm search term entered by the user
 * @param searchType search type (name, provider, version, context, description or tag)
 * @param start start index of the result page
 * @param end end index of the result page
 * @param limitAttributes whether to load APIs with a limited set of attributes
 * @return a map containing the matching APIs ("apis"), the result length ("length") and the "isMore" flag
 * @throws APIManagementException if the registry search fails
 */
public Map<String,Object> searchPaginatedAPIs(Registry registry, String searchTerm, String searchType,int start,int end, boolean limitAttributes) throws APIManagementException {
SortedSet<API> apiSet = new TreeSet<API>(new APINameComparator());
List<API> apiList = new ArrayList<API>();
searchTerm = searchTerm.trim();
Map<String,Object> result=new HashMap<String, Object>();
int totalLength=0;
boolean isMore = false;
String criteria=APIConstants.API_OVERVIEW_NAME;
try {
String paginationLimit = getAPIManagerConfiguration()
.getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
// Because the store jaggery pagination logic is 10 results per a page we need to set pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
if (artifactManager != null) {
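// Map the user-facing search type to the corresponding governance artifact attribute used as the search criteria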
if (APIConstants.API_PROVIDER.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_PROVIDER;
searchTerm = searchTerm.replaceAll("@", "-AT-");
} else if (APIConstants.API_VERSION_LABEL.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_VERSION;
} else if (APIConstants.API_CONTEXT.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_CONTEXT;
} else if (APIConstants.API_DESCRIPTION.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_DESCRIPTION;
} else if (APIConstants.API_TAG.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_TAG;
}
//Create the search attribute map for PUBLISHED APIs
final String searchValue = searchTerm;
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(criteria, new ArrayList<String>() {{
add(searchValue);
}});
boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
//Restrict the search to PUBLISHED APIs when there is no need to return APIs with multiple statuses.
//This is because pagination breaks when we do another filtering pass on the API status afterwards.
if (!displayAPIsWithMultipleStatus) {
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
}
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
boolean isFound = true;
if (genericArtifacts == null || genericArtifacts.length == 0) {
if (APIConstants.API_OVERVIEW_PROVIDER.equals(criteria)) {
genericArtifacts = searchAPIsByOwner(artifactManager, searchValue);
if (genericArtifacts == null || genericArtifacts.length == 0) {
isFound = false;
}
}
else {
isFound = false;
}
}
if (!isFound) {
result.put("apis", apiSet);
result.put("length", 0);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist, cannot determine total API count without incurring perf hit
--totalLength; // Remove the additional 1 added earlier when setting max pagination limit
}
int tempLength =0;
for (GenericArtifact artifact : genericArtifacts) {
String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
if (APIUtil.isAllowDisplayAPIsWithMultipleStatus()) {
if (APIConstants.PROTOTYPED.equals(status) || APIConstants.PUBLISHED.equals(status)
|| APIConstants.DEPRECATED.equals(status)) {
API resultAPI;
if (limitAttributes) {
resultAPI = APIUtil.getAPI(artifact);
} else {
resultAPI = APIUtil.getAPI(artifact, registry);
}
if (resultAPI != null) {
apiList.add(resultAPI);
}
}
} else {
if (APIConstants.PROTOTYPED.equals(status) || APIConstants.PUBLISHED.equals(status)) {
API resultAPI;
if (limitAttributes) {
resultAPI = APIUtil.getAPI(artifact);
} else {
resultAPI = APIUtil.getAPI(artifact, registry);
}
if (resultAPI != null) {
apiList.add(resultAPI);
}
}
}
// Ensure the number of APIs returned matches the total length; an additional API may be
// returned due to incrementing the pagination limit when fetching from the registry
tempLength++;
if (tempLength >= totalLength){
break;
}
}
apiSet.addAll(apiList);
}
} catch (RegistryException e) {
handleException("Failed to search APIs with type", e);
}
result.put("apis",apiSet);
result.put("length",totalLength);
result.put("isMore", isMore);
return result;
}
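/**
 * Finds generic API artifacts whose overview owner attribute matches the given search value.
 * Used as a fallback when searching by provider returns no results.
 */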
private GenericArtifact[] searchAPIsByOwner(GenericArtifactManager artifactManager, final String searchValue) throws GovernanceException {
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_OWNER, new ArrayList<String>() {
{
add(searchValue);
}
});
return artifactManager.findGenericArtifacts(listMap);
}
/**
 * This method deletes the corresponding entries from the application key mapping table and the
 * application registration table.
 *
 * @param applicationName application name
 * @param tokenType token type
 * @param groupId group id
 * @param userName user name
 * @throws APIManagementException if the cleanup fails
 */
@Override
public void cleanUpApplicationRegistration(String applicationName ,String tokenType ,String groupId ,String
userName) throws APIManagementException{
Application application = apiMgtDAO.getApplicationByName(applicationName, userName, groupId);
String applicationId = String.valueOf(application.getId());
cleanUpApplicationRegistrationByApplicationId(applicationId, tokenType);
}
/*
 * @see super.cleanUpApplicationRegistrationByApplicationId
 */
@Override
public void cleanUpApplicationRegistrationByApplicationId(String applicationId, String tokenType) throws APIManagementException {
apiMgtDAO.deleteApplicationRegistration(applicationId , tokenType);
apiMgtDAO.deleteApplicationKeyMappingByApplicationIdAndType(applicationId, tokenType);
apiMgtDAO.getConsumerkeyByApplicationIdAndKeyType(applicationId, tokenType);
}
/**
 * Maps an existing OAuth client (identified by its consumer key) to an APIM application.
 *
 * @param jsonString string containing the OAuth app details
 * @param userName user name of the logged in user
 * @param clientId consumer key of the OAuth application
 * @param applicationName name of the APIM application
 * @param keyType key type of the application keys (e.g. production or sandbox)
 * @param tokenType application token type; can be either default or jwt
 * @return a map containing the key details of the mapped application
 * @throws APIManagementException if the consumer key is already mapped to another application or the mapping fails
 */
@Override
public Map<String, Object> mapExistingOAuthClient(String jsonString, String userName, String clientId,
String applicationName, String keyType, String tokenType)
throws APIManagementException {
String callBackURL = null;
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, clientId, callBackURL,
"default",
jsonString, tokenType);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
// Checking if clientId is mapped with another application.
if (apiMgtDAO.isMappingExistsforConsumerKey(clientId)) {
String message = "Consumer Key " + clientId + " is used for another Application.";
log.error(message);
throw new APIManagementException(message);
}
log.debug("Client ID not mapped previously with another application.");
//createApplication on oAuthorization server.
OAuthApplicationInfo oAuthApplication = keyManager.mapOAuthApplication(oauthAppRequest);
//Do application mapping with consumerKey.
apiMgtDAO.createApplicationKeyTypeMappingForManualClients(keyType, applicationName, userName, clientId);
AccessTokenInfo tokenInfo;
if (oAuthApplication.getJsonString().contains(APIConstants.GRANT_TYPE_CLIENT_CREDENTIALS)) {
AccessTokenRequest tokenRequest = ApplicationUtils.createAccessTokenRequest(oAuthApplication, null);
tokenInfo = keyManager.getNewApplicationAccessToken(tokenRequest);
} else {
tokenInfo = new AccessTokenInfo();
tokenInfo.setAccessToken("");
tokenInfo.setValidityPeriod(0L);
String[] noScopes = new String[] {"N/A"};
tokenInfo.setScope(noScopes);
oAuthApplication.addParameter("tokenScope", Arrays.toString(noScopes));
}
Map<String, Object> keyDetails = new HashMap<String, Object>();
if (tokenInfo != null) {
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oAuthApplication.getClientId());
keyDetails.put("consumerSecret", oAuthApplication.getParameter("client_secret"));
keyDetails.put("appDetails", oAuthApplication.getJsonString());
return keyDetails;
}
/**
 * Returns the SubscribedAPI object which is related to the given subscription id.
 *
 * @param subscriptionId subscription id
 * @return the matching SubscribedAPI object
 * @throws APIManagementException if the subscription cannot be retrieved
 */
@Override
public SubscribedAPI getSubscriptionById(int subscriptionId) throws APIManagementException {
return apiMgtDAO.getSubscriptionById(subscriptionId);
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber) throws APIManagementException {
return getSubscribedAPIs(subscriber, null);
}
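/**
 * Returns all subscriptions of the given subscriber (optionally restricted to a group), with the tier
 * display names resolved against the tenant's tier configuration.
 */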
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String groupingId) throws APIManagementException {
Set<SubscribedAPI> originalSubscribedAPIs;
Set<SubscribedAPI> subscribedAPIs = new HashSet<SubscribedAPI>();
try {
originalSubscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, groupingId);
if (originalSubscribedAPIs != null && !originalSubscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : originalSubscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi.getTier().getName());
subscribedAPIs.add(subscribedApi);
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName(), e);
}
return subscribedAPIs;
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String applicationName, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, applicationName, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationName, e);
}
return subscribedAPIs;
}
/*
 * @see super.getSubscribedAPIsByApplicationId
 */
@Override
public Set<SubscribedAPI> getSubscribedAPIsByApplicationId(Subscriber subscriber, int applicationId, String groupingId) throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getSubscribedAPIsByApplicationId(subscriber, applicationId, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
Set<APIKey> keys = getApplicationKeys(subscribedApi.getApplication().getId());
for (APIKey key : keys) {
subscribedApi.getApplication().addKey(key);
}
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationId, e);
}
return subscribedAPIs;
}
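/**
 * Returns a page of the subscriptions of the given subscriber under the given application name,
 * with the tier display names resolved against the tenant's tier configuration.
 */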
@Override
public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, String applicationName,
int startSubIndex, int endSubIndex, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getPaginatedSubscribedAPIs(subscriber, applicationName, startSubIndex,
endSubIndex, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
// subscribedAPIs.add(subscribedApi);
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationName, e);
}
return subscribedAPIs;
}
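/**
 * Returns a page of the subscriptions of the given subscriber under the given application id,
 * including the application keys of each subscribed application.
 */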
@Override
public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, int applicationId, int startSubIndex,
int endSubIndex, String groupingId) throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getPaginatedSubscribedAPIs(subscriber, applicationId, startSubIndex,
endSubIndex, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
// subscribedAPIs.add(subscribedApi);
Set<APIKey> keys = getApplicationKeys(subscribedApi.getApplication().getId());
for (APIKey key : keys) {
subscribedApi.getApplication().addKey(key);
}
}
}
} catch (APIManagementException e) {
String msg = "Failed to get APIs of " + subscriber.getName() + " under application " + applicationId;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
return subscribedAPIs;
}
public Integer getSubscriptionCount(Subscriber subscriber,String applicationName,String groupingId)
throws APIManagementException {
return apiMgtDAO.getSubscriptionCount(subscriber,applicationName,groupingId);
}
public Integer getSubscriptionCountByApplicationId(Subscriber subscriber, int applicationId, String groupingId)
throws APIManagementException {
return apiMgtDAO.getSubscriptionCountByApplicationId(subscriber, applicationId, groupingId);
}
@Override
public Set<APIIdentifier> getAPIByConsumerKey(String accessToken) throws APIManagementException {
try {
return apiMgtDAO.getAPIByConsumerKey(accessToken);
} catch (APIManagementException e) {
handleException("Error while obtaining API from API key", e);
}
return null;
}
@Override
public boolean isSubscribed(APIIdentifier apiIdentifier, String userId)
throws APIManagementException {
boolean isSubscribed;
try {
isSubscribed = apiMgtDAO.isSubscribed(apiIdentifier, userId);
} catch (APIManagementException e) {
String msg = "Failed to check if user(" + userId + ") has subscribed to " + apiIdentifier;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
return isSubscribed;
}
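/**
 * Adds a subscription for the given API and application on behalf of the given user and triggers the
 * subscription creation workflow. The new subscription entry is rolled back if the workflow execution fails,
 * and a rejection by the workflow is reflected in the returned subscription status.
 *
 * @return a SubscriptionResponse containing the subscription status, UUID and the workflow response
 * @throws APIManagementException if the API is not in the PUBLISHED state or the workflow execution fails
 */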
@Override
public SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
API api = getAPI(identifier);
WorkflowResponse workflowResponse = null;
int subscriptionId;
String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(userId);
if (APIConstants.PUBLISHED.equals(api.getStatus())) {
subscriptionId = apiMgtDAO.addSubscription(identifier, api.getContext(), applicationId,
APIConstants.SubscriptionStatus.ON_HOLD, tenantAwareUsername);
boolean isTenantFlowStarted = false;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
String applicationName = apiMgtDAO.getApplicationNameFromId(applicationId);
try {
WorkflowExecutor addSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
SubscriptionWorkflowDTO workflowDTO = new SubscriptionWorkflowDTO();
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
workflowDTO.setExternalWorkflowReference(addSubscriptionWFExecutor.generateUUID());
workflowDTO.setWorkflowReference(String.valueOf(subscriptionId));
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
workflowDTO.setCallbackUrl(addSubscriptionWFExecutor.getCallbackURL());
workflowDTO.setApiName(identifier.getApiName());
workflowDTO.setApiContext(api.getContext());
workflowDTO.setApiVersion(identifier.getVersion());
workflowDTO.setApiProvider(identifier.getProviderName());
workflowDTO.setTierName(identifier.getTier());
workflowDTO.setApplicationName(apiMgtDAO.getApplicationNameFromId(applicationId));
workflowDTO.setApplicationId(applicationId);
workflowDTO.setSubscriber(userId);
workflowResponse = addSubscriptionWFExecutor.execute(workflowDTO);
} catch (WorkflowException e) {
//If the workflow execution fails, roll back transaction by removing the subscription entry.
apiMgtDAO.removeSubscriptionById(subscriptionId);
log.error("Could not execute Workflow", e);
throw new APIManagementException("Could not execute Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (APIUtil.isAPIGatewayKeyCacheEnabled()) {
invalidateCachedKeys(applicationId);
}
//to handle on-the-fly subscription rejection (and removal of subscription entry from the database)
//the response should have {"Status":"REJECTED"} in the json payload for this to work.
boolean subscriptionRejected = false;
String subscriptionStatus = null;
String subscriptionUUID = "";
if (workflowResponse != null && workflowResponse.getJSONPayload() != null
&& !workflowResponse.getJSONPayload().isEmpty()) {
try {
JSONObject wfResponseJson = (JSONObject) new JSONParser().parse(workflowResponse.getJSONPayload());
if (APIConstants.SubscriptionStatus.REJECTED.equals(wfResponseJson.get("Status"))) {
subscriptionRejected = true;
subscriptionStatus = APIConstants.SubscriptionStatus.REJECTED;
}
} catch (ParseException e) {
log.error('\'' + workflowResponse.getJSONPayload() + "' is not a valid JSON.", e);
}
}
if (!subscriptionRejected) {
SubscribedAPI addedSubscription = getSubscriptionById(subscriptionId);
subscriptionStatus = addedSubscription.getSubStatus();
subscriptionUUID = addedSubscription.getUUID();
JSONObject subsLogObject = new JSONObject();
subsLogObject.put(APIConstants.AuditLogConstants.API_NAME, identifier.getApiName());
subsLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_ID, applicationId);
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, applicationName);
subsLogObject.put(APIConstants.AuditLogConstants.TIER, identifier.getTier());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.SUBSCRIPTION, subsLogObject.toString(),
APIConstants.AuditLogConstants.CREATED, this.username);
workflowResponse = new GeneralWorkflowResponse();
}
if (log.isDebugEnabled()) {
String logMessage = "API Name: " + identifier.getApiName() + ", API Version " + identifier.getVersion()
+ ", Subscription Status: " + subscriptionStatus + " subscribe by " + userId
+ " for app " + applicationName;
log.debug(logMessage);
}
return new SubscriptionResponse(subscriptionStatus, subscriptionUUID, workflowResponse);
} else {
throw new APIMgtResourceNotFoundException("Subscriptions not allowed on APIs in the state: " +
api.getStatus());
}
}
@Override
public SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId,
String groupId) throws APIManagementException {
boolean isValid = validateApplication(userId, applicationId, groupId);
if (!isValid) {
log.error("Application " + applicationId + " is not accessible to user " + userId);
throw new APIManagementException("Application is not accessible to user " + userId);
}
return addSubscription(identifier, userId, applicationId);
}
/**
* Check whether the application is accessible to the specified user
* @param userId username
* @param applicationId application ID
* @param groupId GroupId list of the application
* @return true if the application is accessible by the specified user
*/
private boolean validateApplication(String userId, int applicationId, String groupId) {
try {
return apiMgtDAO.isAppAllowed(applicationId, userId, groupId);
} catch (APIManagementException e) {
log.error("Error occurred while getting user group id for user: " + userId, e);
}
return false;
}
@Override
public String getSubscriptionStatusById(int subscriptionId) throws APIManagementException {
return apiMgtDAO.getSubscriptionStatusById(subscriptionId);
}
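/**
 * Removes the subscription of the given application to the given API and triggers the subscription
 * deletion workflow. Any pending subscription approval task for the same subscription is cleaned up first.
 *
 * @throws APIManagementException if the deletion workflow cannot be executed
 */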
@Override
public void removeSubscription(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
boolean isTenantFlowStarted = false;
String providerTenantDomain = MultitenantUtils.getTenantDomain(APIUtil.
replaceEmailDomainBack(identifier.getProviderName()));
String applicationName = apiMgtDAO.getApplicationNameFromId(applicationId);
try {
if (providerTenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME
.equals(providerTenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(providerTenantDomain, true);
isTenantFlowStarted = true;
}
API api = getAPI(identifier);
SubscriptionWorkflowDTO workflowDTO;
WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
WorkflowExecutor removeSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION);
String workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(identifier, applicationId);
// in a normal flow workflowExtRef is null when workflows are not enabled
if (workflowExtRef == null) {
workflowDTO = new SubscriptionWorkflowDTO();
} else {
workflowDTO = (SubscriptionWorkflowDTO) apiMgtDAO.retrieveWorkflow(workflowExtRef);
// set tiername to the workflowDTO only when workflows are enabled
SubscribedAPI subscription = apiMgtDAO
.getSubscriptionById(Integer.parseInt(workflowDTO.getWorkflowReference()));
workflowDTO.setTierName(subscription.getTier().getName());
}
workflowDTO.setApiProvider(identifier.getProviderName());
workflowDTO.setApiContext(api.getContext());
workflowDTO.setApiName(identifier.getApiName());
workflowDTO.setApiVersion(identifier.getVersion());
workflowDTO.setApplicationName(applicationName);
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
workflowDTO.setExternalWorkflowReference(workflowExtRef);
workflowDTO.setSubscriber(userId);
workflowDTO.setCallbackUrl(removeSubscriptionWFExecutor.getCallbackURL());
workflowDTO.setApplicationId(applicationId);
String status = apiMgtDAO.getSubscriptionStatus(identifier, applicationId);
if (APIConstants.SubscriptionStatus.ON_HOLD.equals(status)) {
try {
createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the deletion process
log.warn("Failed to clean pending subscription approval task");
}
}
// update attributes of the new remove workflow to be created
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setExternalWorkflowReference(removeSubscriptionWFExecutor.generateUUID());
removeSubscriptionWFExecutor.execute(workflowDTO);
JSONObject subsLogObject = new JSONObject();
subsLogObject.put(APIConstants.AuditLogConstants.API_NAME, identifier.getApiName());
subsLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_ID, applicationId);
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, applicationName);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.SUBSCRIPTION, subsLogObject.toString(),
APIConstants.AuditLogConstants.DELETED, this.username);
} catch (WorkflowException e) {
String errorMsg = "Could not execute Workflow, " + WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION +
" for apiID " + identifier.getApiName();
handleException(errorMsg, e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (APIUtil.isAPIGatewayKeyCacheEnabled()) {
invalidateCachedKeys(applicationId);
}
if (log.isDebugEnabled()) {
String logMessage = "API Name: " + identifier.getApiName() + ", API Version " +
identifier.getVersion() + " subscription removed from app " + applicationName + " by " + userId;
log.debug(logMessage);
}
}
@Override
public void removeSubscription(APIIdentifier identifier, String userId, int applicationId, String groupId) throws
APIManagementException {
//check whether the application is accessible to the logged in user
boolean isValid = validateApplication(userId, applicationId, groupId);
if (!isValid) {
log.error("Application " + applicationId + " is not accessible to user " + userId);
throw new APIManagementException("Application is not accessible to user " + userId);
}
removeSubscription(identifier, userId, applicationId);
}
/**
* Removes a subscription specified by SubscribedAPI object
*
* @param subscription SubscribedAPI object
* @throws APIManagementException
*/
@Override
public void removeSubscription(SubscribedAPI subscription) throws APIManagementException {
String uuid = subscription.getUUID();
SubscribedAPI subscribedAPI = apiMgtDAO.getSubscriptionByUUID(uuid);
if (subscribedAPI != null) {
Application application = subscribedAPI.getApplication();
APIIdentifier identifier = subscribedAPI.getApiId();
String userId = application.getSubscriber().getName();
removeSubscription(identifier, userId, application.getId());
if (log.isDebugEnabled()) {
String appName = application.getName();
String logMessage =
"API Name: " + identifier.getApiName() + ", API Version " + identifier.getVersion() +
" subscription (uuid : " + uuid + ") removed from app " + appName;
log.debug(logMessage);
}
} else {
throw new APIManagementException("Subscription for UUID:" + uuid +" does not exist.");
}
}
/**
 * Invalidates the cached gateway keys related to the given application.
 *
 * @param applicationId application ID whose related cache keys are to be cleared
 * @throws APIManagementException if the cache invalidation fails
 */
private void invalidateCachedKeys(int applicationId) throws APIManagementException {
CacheInvalidator.getInstance().invalidateCacheForApp(applicationId);
}
@Override
public void removeSubscriber(APIIdentifier identifier, String userId)
throws APIManagementException {
throw new UnsupportedOperationException("Unsubscribe operation is not yet implemented");
}
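/**
 * Updates the subscriptions of the given application to the given API on behalf of the given user.
 */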
@Override
public void updateSubscriptions(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
API api = getAPI(identifier);
apiMgtDAO.updateSubscriptions(identifier, api.getContext(), applicationId, userId);
}
@Override
public void addComment(APIIdentifier identifier, String commentText, String user) throws APIManagementException {
apiMgtDAO.addComment(identifier, commentText, user);
}
@Override
public org.wso2.carbon.apimgt.api.model.Comment[] getComments(APIIdentifier identifier)
throws APIManagementException {
return apiMgtDAO.getComments(identifier);
}
/**
 * Adds a new Application from the store.
 *
 * @param application the {@link org.wso2.carbon.apimgt.api.model.Application} to add
 * @param userId user id of the application owner
 * @return the id of the newly added application
 * @throws APIManagementException if the application name is invalid, a duplicate application exists,
 *                                or the application creation workflow fails
 */
@Override
public int addApplication(Application application, String userId)
throws APIManagementException {
if (application.getName() != null && (application.getName().length() != application.getName().trim().length())) {
handleApplicationNameContainSpacesException("Application name " +
"cannot contain leading or trailing white spaces");
}
String regex = "^[a-zA-Z0-9 ._-]*$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(application.getName());
if (!matcher.find()) {
handleApplicationNameContainsInvalidCharactersException("Application name contains invalid characters");
}
if (APIUtil.isApplicationExist(userId, application.getName(), application.getGroupId())) {
handleResourceAlreadyExistsException(
"A duplicate application already exists by the name - " + application.getName());
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(application.getCallbackUrl())) {
application.setCallbackUrl(null);
}
int applicationId = apiMgtDAO.addApplication(application, userId);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.CREATED, this.username);
boolean isTenantFlowStarted = false;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
try {
WorkflowExecutor appCreationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
ApplicationWorkflowDTO appWFDto = new ApplicationWorkflowDTO();
appWFDto.setApplication(application);
appWFDto.setExternalWorkflowReference(appCreationWFExecutor.generateUUID());
appWFDto.setWorkflowReference(String.valueOf(applicationId));
appWFDto.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
appWFDto.setCallbackUrl(appCreationWFExecutor.getCallbackURL());
appWFDto.setStatus(WorkflowStatus.CREATED);
appWFDto.setTenantDomain(tenantDomain);
appWFDto.setTenantId(tenantId);
appWFDto.setUserName(userId);
appWFDto.setCreatedTime(System.currentTimeMillis());
appCreationWFExecutor.execute(appWFDto);
} catch (WorkflowException e) {
//If the workflow execution fails, roll back transaction by removing the application entry.
application.setId(applicationId);
apiMgtDAO.deleteApplication(application);
log.error("Unable to execute Application Creation Workflow", e);
handleException("Unable to execute Application Creation Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (log.isDebugEnabled()) {
log.debug("Application Name: " + application.getName() +" added successfully.");
}
return applicationId;
}
/** Updates an Application identified by its id
*
* @param application Application object to be updated
* @throws APIManagementException
*/
@Override
public void updateApplication(Application application) throws APIManagementException {
Application existingApp;
String uuid = application.getUUID();
if (!StringUtils.isEmpty(uuid)) {
existingApp = apiMgtDAO.getApplicationByUUID(uuid);
if (existingApp != null) {
Set<APIKey> keys = getApplicationKeys(existingApp.getId());
for (APIKey key : keys) {
existingApp.addKey(key);
}
}
application.setId(existingApp.getId());
} else {
existingApp = apiMgtDAO.getApplicationById(application.getId());
}
if (existingApp != null && APIConstants.ApplicationStatus.APPLICATION_CREATED.equals(existingApp.getStatus())) {
throw new APIManagementException("Cannot update the application while it is INACTIVE");
}
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().
equalsIgnoreCase(existingApp.getSubscriber().getName());
} else {
isUserAppOwner = application.getSubscriber().getName().equals(existingApp.getSubscriber().getName());
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + application.getSubscriber().getName() + ", " +
"attempted to update application owned by: " + existingApp.getSubscriber().getName());
}
if (application.getName() != null && (application.getName().length() != application.getName().trim().length())) {
handleApplicationNameContainSpacesException("Application name " +
"cannot contain leading or trailing white spaces");
}
String regex = "^[a-zA-Z0-9 ._-]*$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(application.getName());
if (!matcher.find()) {
handleApplicationNameContainsInvalidCharactersException("Application name contains invalid characters");
}
apiMgtDAO.updateApplication(application);
if (log.isDebugEnabled()) {
log.debug("Successfully updated the Application: " + application.getId() +" in the database.");
}
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.STATUS, existingApp != null ? existingApp.getStatus() : "");
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
try {
invalidateCachedKeys(application.getId());
} catch (APIManagementException ignore) {
//Log and ignore since we do not want to throw exceptions to the front end due to cache invalidation failure.
log.warn("Failed to invalidate Gateway Cache " + ignore.getMessage(), ignore);
}
}
/**
 * Removes an Application from the API Store.
 *
 * @param application the Application object that represents the application to remove
 * @param username user name of the user performing the removal
 * @throws APIManagementException if the user is not the application owner or the deletion workflow fails
 */
@Override
public void removeApplication(Application application, String username) throws APIManagementException {
String uuid = application.getUUID();
if (application.getId() == 0 && !StringUtils.isEmpty(uuid)) {
application = apiMgtDAO.getApplicationByUUID(uuid);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
}
boolean isTenantFlowStarted = false;
int applicationId = application.getId();
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().equalsIgnoreCase(username);
} else {
isUserAppOwner = application.getSubscriber().getName().equals(username);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + username + ", " +
"attempted to remove application owned by: " + application.getSubscriber().getName());
}
try {
String workflowExtRef;
ApplicationWorkflowDTO workflowDTO;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
isTenantFlowStarted = true;
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
WorkflowExecutor createApplicationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
WorkflowExecutor createProductionRegistrationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
WorkflowExecutor createSandboxRegistrationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
WorkflowExecutor removeApplicationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION);
workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceByApplicationID(application.getId());
// in a normal flow workflowExtRef is null when workflows are not enabled
if (workflowExtRef == null) {
workflowDTO = new ApplicationWorkflowDTO();
} else {
workflowDTO = (ApplicationWorkflowDTO) apiMgtDAO.retrieveWorkflow(workflowExtRef);
}
workflowDTO.setApplication(application);
workflowDTO.setCallbackUrl(removeApplicationWFExecutor.getCallbackURL());
workflowDTO.setUserName(this.username);
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
// Remove from cache first since we won't be able to find active access tokens
// once the application is removed.
invalidateCachedKeys(application.getId());
// clean up pending subscription tasks
Set<Integer> pendingSubscriptions = apiMgtDAO.getPendingSubscriptionsByApplicationId(applicationId);
for (int subscription : pendingSubscriptions) {
try {
workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(subscription);
createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for subscription " + subscription);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending subscription approval task: " + subscription);
}
}
// cleanup pending application registration tasks
String productionKeyStatus = apiMgtDAO
.getRegistrationApprovalState(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
String sandboxKeyStatus = apiMgtDAO
.getRegistrationApprovalState(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
if (WorkflowStatus.CREATED.toString().equals(productionKeyStatus)) {
try {
workflowExtRef = apiMgtDAO
.getRegistrationWFReference(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
createProductionRegistrationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for production key of application "
+ applicationId);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending production key approval task of " + applicationId);
}
}
if (WorkflowStatus.CREATED.toString().equals(sandboxKeyStatus)) {
try {
workflowExtRef = apiMgtDAO
.getRegistrationWFReference(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
createSandboxRegistrationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for sandbox key of application "
+ applicationId);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending sandbox key approval task of " + applicationId);
}
}
if (workflowExtRef != null) {
try {
createApplicationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending application approval task of " + applicationId);
}
}
// update attributes of the new remove workflow to be created
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION);
workflowDTO.setExternalWorkflowReference(removeApplicationWFExecutor.generateUUID());
removeApplicationWFExecutor.execute(workflowDTO);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.DELETED, this.username);
} catch (WorkflowException e) {
String errorMsg = "Could not execute Workflow, " + WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION + " " +
"for applicationID " + application.getId();
handleException(errorMsg, e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (log.isDebugEnabled()) {
String logMessage = "Application Name: " + application.getName() + " successfully removed";
log.debug(logMessage);
}
}
/**
* This method is implemented specifically for the REST API by moving the application and data access logic
* out of the host object layer. The requested scopes are passed to this method as tokenScope, and all
* scope-related processing is handled here, so the host object only needs to pass the nine required parameters.
*/
@Override
public Map<String, Object> requestApprovalForApplicationRegistration(String userId, String applicationName,
String tokenType, String callbackUrl,
String[] allowedDomains, String validityTime,
String tokenScope, String groupingId,
String jsonString
)
throws APIManagementException {
boolean isTenantFlowStarted = false;
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
int tenantId = MultitenantConstants.INVALID_TENANT_ID;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Unable to retrieve the tenant information of the current user.", e);
}
//checking for authorized scopes
Set<Scope> scopeSet = new LinkedHashSet<Scope>();
List<Scope> authorizedScopes = new ArrayList<Scope>();
String authScopeString;
if (tokenScope != null && tokenScope.length() != 0 &&
!APIConstants.OAUTH2_DEFAULT_SCOPE.equals(tokenScope)) {
scopeSet.addAll(getScopesByScopeKeys(tokenScope, tenantId));
authorizedScopes = getAllowedScopesForUserApplication(userId, scopeSet);
}
if (!authorizedScopes.isEmpty()) {
Set<Scope> authorizedScopeSet = new HashSet<Scope>(authorizedScopes);
StringBuilder scopeBuilder = new StringBuilder();
for (Scope scope : authorizedScopeSet) {
scopeBuilder.append(scope.getKey()).append(' ');
}
authScopeString = scopeBuilder.toString();
} else {
authScopeString = APIConstants.OAUTH2_DEFAULT_SCOPE;
}
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
// initiate WorkflowExecutor
WorkflowExecutor appRegistrationWorkflow = null;
// initiate ApplicationRegistrationWorkflowDTO
ApplicationRegistrationWorkflowDTO appRegWFDto = null;
ApplicationKeysDTO appKeysDto = new ApplicationKeysDTO();
// get APIM application by Application Name and userId.
Application application = ApplicationUtils.retrieveApplication(applicationName, userId, groupingId);
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().equalsIgnoreCase(userId);
} else {
isUserAppOwner = application.getSubscriber().getName().equals(userId);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + application.getSubscriber().getName() + ", " +
"attempted to generate tokens for application owned by: " + userId);
}
// if it is a PRODUCTION application.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
// initiate the workflow type. By default, the simple workflow executor is used.
appRegistrationWorkflow =
getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
appRegWFDto =
(ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
// if it is a SANDBOX application.
appRegistrationWorkflow =
getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
appRegWFDto =
(ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
} else {
throw new APIManagementException("Invalid Token Type '" + tokenType + "' requested.");
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(callbackUrl)) {
callbackUrl = null;
}
String applicationTokenType = application.getTokenType();
if (StringUtils.isEmpty(application.getTokenType())) {
applicationTokenType = APIConstants.DEFAULT_TOKEN_TYPE;
}
// Build key manager instance and create oAuthAppRequest by jsonString.
OAuthAppRequest request =
ApplicationUtils.createOauthAppRequest(applicationName, null,
callbackUrl, authScopeString, jsonString, applicationTokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.VALIDITY_PERIOD, validityTime);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_CALLBACK_URL, callbackUrl);
// Setting request values in WorkflowDTO - In future we should keep
// Application/OAuthApplication related
// information in the respective entities not in the workflowDTO.
appRegWFDto.setStatus(WorkflowStatus.CREATED);
appRegWFDto.setCreatedTime(System.currentTimeMillis());
appRegWFDto.setTenantDomain(tenantDomain);
appRegWFDto.setTenantId(tenantId);
appRegWFDto.setExternalWorkflowReference(appRegistrationWorkflow.generateUUID());
appRegWFDto.setWorkflowReference(appRegWFDto.getExternalWorkflowReference());
appRegWFDto.setApplication(application);
request.setMappingId(appRegWFDto.getWorkflowReference());
if (!application.getSubscriber().getName().equals(userId)) {
appRegWFDto.setUserName(application.getSubscriber().getName());
} else {
appRegWFDto.setUserName(userId);
}
appRegWFDto.setCallbackUrl(appRegistrationWorkflow.getCallbackURL());
appRegWFDto.setAppInfoDTO(request);
appRegWFDto.setDomainList(allowedDomains);
appRegWFDto.setKeyDetails(appKeysDto);
appRegistrationWorkflow.execute(appRegWFDto);
Map<String, Object> keyDetails = new HashMap<String, Object>();
keyDetails.put("keyState", appRegWFDto.getStatus().toString());
OAuthApplicationInfo applicationInfo = appRegWFDto.getApplicationInfo();
if (applicationInfo != null) {
keyDetails.put("consumerKey", applicationInfo.getClientId());
keyDetails.put("consumerSecret", applicationInfo.getClientSecret());
keyDetails.put("appDetails", applicationInfo.getJsonString());
}
// There can be instances where generating the Application Token is
// not required. In those cases,
// token info will have nothing.
AccessTokenInfo tokenInfo = appRegWFDto.getAccessTokenInfo();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", tokenInfo.getValidityPeriod());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
keyDetails.put("tokenScope", tokenInfo.getScopes());
}
JSONObject appLogObject = new JSONObject();
appLogObject.put("Generated keys for application", application.getName());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyDetails;
} catch (WorkflowException e) {
log.error("Could not execute Workflow", e);
throw new APIManagementException(e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
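/**
* Requests approval for application key registration using the application id supplied in the given map.
*
* @param appInfo map of registration details: username, scopes, applicationName, groupingId, tokenType,
*                callbackUrl, jsonParams, allowedDomains, validityPeriod and applicationId.
* @return Map of key details (key state, consumer key/secret, app details and token information when available).
* @throws APIManagementException if the tenant cannot be resolved or the registration workflow fails.
*/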
@Override
public Map<String, Object> requestApprovalForApplicationRegistrationByApplicationId(
Map<String, Object> appInfo) throws APIManagementException {
if (appInfo == null || appInfo.isEmpty()) {
log.error("Application information is not provided to request approval For Application Registration");
return new HashMap<String, Object>(0);
}
boolean isTenantFlowStarted = false;
String username = appInfo.get("username").toString();
String scopes = appInfo.get("scopes").toString();
String applicationName = appInfo.get("applicationName").toString();
String groupingId = appInfo.get("groupingId").toString();
String tokenType = appInfo.get("tokenType").toString();
String callbackUrl = appInfo.get("callbackUrl").toString();
String jsonParams = appInfo.get("jsonParams").toString();
String[] allowedDomains = (String[]) appInfo.get("allowedDomains");
String validityTime = appInfo.get("validityPeriod").toString();
int applicationId = Integer.valueOf(appInfo.get("applicationId").toString());
String tenantDomain = MultitenantUtils.getTenantDomain(username);
int tenantId = MultitenantConstants.INVALID_TENANT_ID;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
} catch (UserStoreException e) {
String msg = "Unable to retrieve the tenant information of the current user.";
log.error(msg, e);
throw new APIManagementException(msg, e);
}
//checking for authorized scopes
Set<Scope> scopeSet = new LinkedHashSet<Scope>();
List<Scope> authorizedScopes = new ArrayList<Scope>();
String authScopeString;
if (scopes != null && scopes.length() != 0 && !APIConstants.OAUTH2_DEFAULT_SCOPE.equals(scopes)) {
scopeSet.addAll(getScopesByScopeKeys(scopes, tenantId));
authorizedScopes = getAllowedScopesForUserApplication(username, scopeSet);
}
if (!authorizedScopes.isEmpty()) {
StringBuilder scopeBuilder = new StringBuilder();
for (Scope scope : authorizedScopes) {
scopeBuilder.append(scope.getKey()).append(' ');
}
authScopeString = scopeBuilder.toString();
} else {
authScopeString = APIConstants.OAUTH2_DEFAULT_SCOPE;
}
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
// initiate WorkflowExecutor
WorkflowExecutor appRegistrationWorkflow = null;
// initiate ApplicationRegistrationWorkflowDTO
ApplicationRegistrationWorkflowDTO appRegWFDto = null;
ApplicationKeysDTO appKeysDto = new ApplicationKeysDTO();
// get APIM application by Application Id.
Application application = ApplicationUtils.retrieveApplicationById(applicationId);
// if it is a PRODUCTION application.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
// initiate the workflow type. By default, the simple workflow executor is used.
appRegistrationWorkflow = getWorkflowExecutor(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
appRegWFDto = (ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
// if it is a SANDBOX application.
appRegistrationWorkflow = getWorkflowExecutor(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
appRegWFDto = (ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
} else {
throw new APIManagementException("Invalid Token Type '" + tokenType + "' requested.");
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(callbackUrl)) {
callbackUrl = null;
}
String applicationTokenType = application.getTokenType();
if (StringUtils.isEmpty(application.getTokenType())) {
applicationTokenType = APIConstants.DEFAULT_TOKEN_TYPE;
}
// Build key manager instance and create oAuthAppRequest by jsonString.
OAuthAppRequest request = ApplicationUtils
.createOauthAppRequest(applicationName, null, callbackUrl, authScopeString, jsonParams,
applicationTokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.VALIDITY_PERIOD, validityTime);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_CALLBACK_URL, callbackUrl);
// Setting request values in WorkflowDTO - In future we should keep
// Application/OAuthApplication related
// information in the respective entities not in the workflowDTO.
appRegWFDto.setStatus(WorkflowStatus.CREATED);
appRegWFDto.setCreatedTime(System.currentTimeMillis());
appRegWFDto.setTenantDomain(tenantDomain);
appRegWFDto.setTenantId(tenantId);
appRegWFDto.setExternalWorkflowReference(appRegistrationWorkflow.generateUUID());
appRegWFDto.setWorkflowReference(appRegWFDto.getExternalWorkflowReference());
appRegWFDto.setApplication(application);
request.setMappingId(appRegWFDto.getWorkflowReference());
if (!application.getSubscriber().getName().equals(username)) {
appRegWFDto.setUserName(application.getSubscriber().getName());
} else {
appRegWFDto.setUserName(username);
}
appRegWFDto.setCallbackUrl(appRegistrationWorkflow.getCallbackURL());
appRegWFDto.setAppInfoDTO(request);
appRegWFDto.setDomainList(allowedDomains);
appRegWFDto.setKeyDetails(appKeysDto);
appRegistrationWorkflow.execute(appRegWFDto);
Map<String, Object> keyDetails = new HashMap<String, Object>();
keyDetails.put("keyState", appRegWFDto.getStatus().toString());
OAuthApplicationInfo applicationInfo = appRegWFDto.getApplicationInfo();
if (applicationInfo != null) {
keyDetails.put("consumerKey", applicationInfo.getClientId());
keyDetails.put("consumerSecret", applicationInfo.getClientSecret());
keyDetails.put("appDetails", applicationInfo.getJsonString());
}
// There can be instances where generating the Application Token is
// not required. In those cases,
// token info will have nothing.
AccessTokenInfo tokenInfo = appRegWFDto.getAccessTokenInfo();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", tokenInfo.getValidityPeriod());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
keyDetails.put("tokenScope", tokenInfo.getScopes());
}
JSONObject appLogObject = new JSONObject();
appLogObject.put("Generated keys for application", application.getName());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyDetails;
} catch (WorkflowException e) {
log.error("Could not execute Workflow", e);
throw new APIManagementException("Could not execute Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
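/**
* Filters the requested scopes down to those the given user is allowed to obtain, based on the roles
* associated with each scope and the user's role list.
*
* @param username user requesting the scopes.
* @param reqScopeSet set of requested scopes.
* @return List of scopes the user is authorized for; scope validation failures are logged and ignored.
*/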
private static List<Scope> getAllowedScopesForUserApplication(String username,
Set<Scope> reqScopeSet) {
String[] userRoles = null;
org.wso2.carbon.user.api.UserStoreManager userStoreManager = null;
List<Scope> authorizedScopes = new ArrayList<Scope>();
try {
RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(MultitenantUtils.getTenantDomain(username));
userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager();
userRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername(username));
} catch (org.wso2.carbon.user.api.UserStoreException e) {
// Log and return since we do not want to stop issuing the token in
// case of scope validation failures.
log.error("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
}
List<String> userRoleList;
if (userRoles != null) {
userRoleList = new ArrayList<String>(Arrays.asList(userRoles));
} else {
userRoleList = Collections.emptyList();
}
//Iterate the requested scopes list.
for (Scope scope : reqScopeSet) {
//Get the set of roles associated with the requested scope.
String roles = scope.getRoles();
//If the scope has been defined in the context of the App and if roles have been defined for the scope
if (roles != null && roles.length() != 0) {
List<String> roleList =
new ArrayList<String>(Arrays.asList(roles.replaceAll(" ", EMPTY_STRING).split(",")));
//Check if user has at least one of the roles associated with the scope
roleList.retainAll(userRoleList);
if (!roleList.isEmpty()) {
authorizedScopes.add(scope);
}
}
}
return authorizedScopes;
}
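/**
* Completes an approved application key registration by generating keys for the given application
* (looked up by name) and returning the generated key details.
*
* @param userId subscriber user ID.
* @param applicationName name of the application.
* @param tokenType key type (PRODUCTION | SANDBOX).
* @param tokenScope requested token scope.
* @param groupingId group ID of the application.
* @return Map of key details (access token, validity time, consumer key/secret), or null if no matching
*         registration workflow is found or the registration is not yet approved.
* @throws APIManagementException if key generation fails.
*/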
@Override
public Map<String, String> completeApplicationRegistration(String userId, String applicationName, String tokenType,
String tokenScope, String groupingId)
throws APIManagementException {
Application application = apiMgtDAO.getApplicationByName(applicationName, userId, groupingId);
String status = apiMgtDAO.getRegistrationApprovalState(application.getId(), tokenType);
Map<String, String> keyDetails = null;
if (!application.getSubscriber().getName().equals(userId)) {
userId = application.getSubscriber().getName();
}
String workflowReference = apiMgtDAO.getWorkflowReference(applicationName, userId);
if (workflowReference != null) {
WorkflowDTO workflowDTO = null;
// Creating workflowDTO for the correct key type.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
}
if (workflowDTO != null) {
// Set the workflow reference in the workflow dto and the populate method will fill in other details
// using the persisted request.
ApplicationRegistrationWorkflowDTO registrationWorkflowDTO = (ApplicationRegistrationWorkflowDTO)
workflowDTO;
registrationWorkflowDTO.setExternalWorkflowReference(workflowReference);
if (APIConstants.AppRegistrationStatus.REGISTRATION_APPROVED.equals(status)) {
apiMgtDAO.populateAppRegistrationWorkflowDTO(registrationWorkflowDTO);
try {
AbstractApplicationRegistrationWorkflowExecutor.dogenerateKeysForApplication
(registrationWorkflowDTO);
AccessTokenInfo tokenInfo = registrationWorkflowDTO.getAccessTokenInfo();
OAuthApplicationInfo oauthApp = registrationWorkflowDTO.getApplicationInfo();
keyDetails = new HashMap<String, String>();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oauthApp.getClientId());
keyDetails.put("consumerSecret", oauthApp.getClientSecret());
keyDetails.put("appDetails", oauthApp.getJsonString());
} catch (APIManagementException e) {
APIUtil.handleException("Error occurred while Creating Keys.", e);
}
}
}
}
return keyDetails;
}
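/**
* Completes an approved application key registration for the application identified by its id and returns
* the generated key details, including the allowed access domains.
*
* @param userId subscriber user ID.
* @param applicationId id of the application.
* @param tokenType key type (PRODUCTION | SANDBOX).
* @param tokenScope requested token scope.
* @param groupingId group ID of the application.
* @return Map of key details, or null if no matching registration workflow is found or the registration
*         is not yet approved.
* @throws APIManagementException if key generation fails.
*/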
@Override
public Map<String, String> completeApplicationRegistration(String userId, int applicationId,
String tokenType, String tokenScope, String groupingId) throws APIManagementException {
Application application = apiMgtDAO.getApplicationById(applicationId);
String status = apiMgtDAO.getRegistrationApprovalState(application.getId(), tokenType);
Map<String, String> keyDetails = null;
if (!application.getSubscriber().getName().equals(userId)) {
userId = application.getSubscriber().getName();
}
//todo get workflow reference by appId
String workflowReference = apiMgtDAO.getWorkflowReferenceByApplicationId(application.getId(), userId);
if (workflowReference != null) {
WorkflowDTO workflowDTO = null;
// Creating workflowDTO for the correct key type.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
}
if (workflowDTO != null) {
// Set the workflow reference in the workflow dto and the populate method will fill in other details
// using the persisted request.
ApplicationRegistrationWorkflowDTO registrationWorkflowDTO = (ApplicationRegistrationWorkflowDTO) workflowDTO;
registrationWorkflowDTO.setExternalWorkflowReference(workflowReference);
if (APIConstants.AppRegistrationStatus.REGISTRATION_APPROVED.equals(status)) {
apiMgtDAO.populateAppRegistrationWorkflowDTO(registrationWorkflowDTO);
try {
AbstractApplicationRegistrationWorkflowExecutor
.dogenerateKeysForApplication(registrationWorkflowDTO);
AccessTokenInfo tokenInfo = registrationWorkflowDTO.getAccessTokenInfo();
OAuthApplicationInfo oauthApp = registrationWorkflowDTO.getApplicationInfo();
keyDetails = new HashMap<String, String>();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oauthApp.getClientId());
keyDetails.put("consumerSecret", oauthApp.getClientSecret());
keyDetails.put("accessallowdomains", registrationWorkflowDTO.getDomainList());
keyDetails.put("appDetails", oauthApp.getJsonString());
} catch (APIManagementException e) {
APIUtil.handleException("Error occurred while Creating Keys.", e);
}
}
}
}
return keyDetails;
}
/**
* Returns the application with the given name owned by the given subscriber, with its OAuth apps and keys loaded.
*
* @param userId APIM subscriber user ID.
* @param ApplicationName APIM application name.
* @param groupingId the group ID to which the application belongs.
* @return Application the matching application, or null if no such application exists.
* @throws APIManagementException
*/
@Override
public Application getApplicationsByName(String userId, String ApplicationName, String groupingId) throws
APIManagementException {
Application application = apiMgtDAO.getApplicationByName(ApplicationName, userId,groupingId);
if (application != null) {
checkAppAttributes(application, userId);
}
application = apiMgtDAO.getApplicationWithOAuthApps(ApplicationName, userId, groupingId);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return application;
}
/**
* Returns the corresponding application given the Id
* @param id Id of the Application
* @return the Application corresponding to the given id.
* @throws APIManagementException
*/
@Override
public Application getApplicationById(int id) throws APIManagementException {
Application application = apiMgtDAO.getApplicationById(id);
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
return application;
}
/*
* @see super.getApplicationById(int id, String userId, String groupId)
* */
@Override
public Application getApplicationById(int id, String userId, String groupId) throws APIManagementException {
Application application = apiMgtDAO.getApplicationById(id, userId, groupId);
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
return application;
}
/**
* Get the status of the application creation process for the given application id.
*
* @param applicationId Id of the Application
* @return String the current application status.
* @throws APIManagementException
*/
@Override
public String getApplicationStatusById(int applicationId) throws APIManagementException {
return apiMgtDAO.getApplicationStatusById(applicationId);
}
@Override
public boolean isApplicationTokenExists(String accessToken) throws APIManagementException {
return apiMgtDAO.isAccessTokenExists(accessToken);
}
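/**
* Returns the subscriptions of the given subscriber that match the given API identifier, with the
* application keys of each matching subscription attached.
*
* @param subscriber the subscriber.
* @param identifier the API identifier to match.
* @param groupingId the group ID of the subscriber's applications.
* @return Set of matching subscriptions.
* @throws APIManagementException
*/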
@Override
public Set<SubscribedAPI> getSubscribedIdentifiers(Subscriber subscriber, APIIdentifier identifier, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPISet = new HashSet<>();
Set<SubscribedAPI> subscribedAPIs = getSubscribedAPIs(subscriber, groupingId);
for (SubscribedAPI api : subscribedAPIs) {
if (api.getApiId().equals(identifier)) {
Set<APIKey> keys = getApplicationKeys(api.getApplication().getId());
for (APIKey key : keys) {
api.addKey(key);
}
subscribedAPISet.add(api);
}
}
return subscribedAPISet;
}
/**
* Returns the set of tier names denied for the current user.
*
* @return Set<String> of denied tier names.
*/
@Override
public Set<String> getDeniedTiers() throws APIManagementException {
// '0' is passed as argument whenever tenant id of logged in user is needed
return getDeniedTiers(0);
}
/**
* Returns the set of tier names denied for the current user.
* @param apiProviderTenantId tenant id of API provider
* @return Set<String> of denied tier names.
*/
@Override
public Set<String> getDeniedTiers(int apiProviderTenantId) throws APIManagementException {
Set<String> deniedTiers = new HashSet<String>();
String[] currentUserRoles;
if (apiProviderTenantId == 0) {
apiProviderTenantId = tenantId;
}
try {
if (apiProviderTenantId != 0) {
/* Get the roles of the Current User */
currentUserRoles = ((UserRegistry) ((UserAwareAPIConsumer) this).registry).
getUserRealm().getUserStoreManager().getRoleListOfUser(((UserRegistry) this.registry)
.getUserName());
Set<TierPermissionDTO> tierPermissions;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermissions = apiMgtDAO.getThrottleTierPermissions(apiProviderTenantId);
} else {
tierPermissions = apiMgtDAO.getTierPermissions(apiProviderTenantId);
}
for (TierPermissionDTO tierPermission : tierPermissions) {
String type = tierPermission.getPermissionType();
List<String> currentRolesList = new ArrayList<String>(Arrays.asList(currentUserRoles));
List<String> roles = new ArrayList<String>(Arrays.asList(tierPermission.getRoles()));
currentRolesList.retainAll(roles);
if (APIConstants.TIER_PERMISSION_ALLOW.equals(type)) {
/* Current User is not allowed for this Tier*/
if (currentRolesList.isEmpty()) {
deniedTiers.add(tierPermission.getTierName());
}
} else {
/* Current User is denied for this Tier*/
if (currentRolesList.size() > 0) {
deniedTiers.add(tierPermission.getTierName());
}
}
}
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
log.error("cannot retrieve user role list for tenant" + tenantDomain, e);
}
return deniedTiers;
}
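/**
* Returns the tier permissions configured for the current tenant, taking advanced throttling into account.
*
* @return Set of tier permissions; empty if the tenant id is unavailable.
* @throws APIManagementException
*/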
@Override
public Set<TierPermission> getTierPermissions() throws APIManagementException {
Set<TierPermission> tierPermissions = new HashSet<TierPermission>();
if (tenantId != 0) {
Set<TierPermissionDTO> tierPermissionDtos;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermissionDtos = apiMgtDAO.getThrottleTierPermissions(tenantId);
} else {
tierPermissionDtos = apiMgtDAO.getTierPermissions(tenantId);
}
for (TierPermissionDTO tierDto : tierPermissionDtos) {
TierPermission tierPermission = new TierPermission(tierDto.getTierName());
tierPermission.setRoles(tierDto.getRoles());
tierPermission.setPermissionType(tierDto.getPermissionType());
tierPermissions.add(tierPermission);
}
}
return tierPermissions;
}
/**
* Check whether the given tier is denied for the current user.
*
* @param tierName name of the tier to check.
* @return true if the tier is denied for the user, false otherwise.
* @throws APIManagementException if failed to get the tiers
*/
@Override
public boolean isTierDeneid(String tierName) throws APIManagementException {
String[] currentUserRoles;
try {
if (tenantId != 0) {
/* Get the roles of the Current User */
currentUserRoles = ((UserRegistry) ((UserAwareAPIConsumer) this).registry).
getUserRealm().getUserStoreManager().getRoleListOfUser(((UserRegistry) this.registry).getUserName());
TierPermissionDTO tierPermission;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermission = apiMgtDAO.getThrottleTierPermission(tierName, tenantId);
} else {
tierPermission = apiMgtDAO.getTierPermission(tierName, tenantId);
}
if (tierPermission == null) {
return false;
} else {
List<String> currentRolesList = new ArrayList<String>(Arrays.asList(currentUserRoles));
List<String> roles = new ArrayList<String>(Arrays.asList(tierPermission.getRoles()));
currentRolesList.retainAll(roles);
if (APIConstants.TIER_PERMISSION_ALLOW.equals(tierPermission.getPermissionType())) {
if (currentRolesList.isEmpty()) {
return true;
}
} else {
if (currentRolesList.size() > 0) {
return true;
}
}
}
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
log.error("cannot retrieve user role list for tenant" + tenantDomain, e);
}
return false;
}
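/**
* Checks whether the given tenant domain differs from the tenant domain of this consumer instance.
*
* @param tenantDomain tenant domain to compare.
* @return true if the domains do not match (or this instance has no tenant domain), false otherwise.
*/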
private boolean isTenantDomainNotMatching(String tenantDomain) {
if (this.tenantDomain != null) {
return !(this.tenantDomain.equals(tenantDomain));
}
return true;
}
@Override
public Set<API> searchAPI(String searchTerm, String searchType, String tenantDomain)
throws APIManagementException {
return null;
}
public Set<Scope> getScopesBySubscribedAPIs(List<APIIdentifier> identifiers)
throws APIManagementException {
return apiMgtDAO.getScopesBySubscribedAPIs(identifiers);
}
public String getScopesByToken(String accessToken) throws APIManagementException {
return null;
}
public Set<Scope> getScopesByScopeKeys(String scopeKeys, int tenantId)
throws APIManagementException {
return apiMgtDAO.getScopesByScopeKeys(scopeKeys, tenantId);
}
@Override
public String getGroupId(int appId) throws APIManagementException {
return apiMgtDAO.getGroupId(appId);
}
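/**
* Extracts the group IDs from the given login response using the configured grouping extractor
* implementation, supporting both single-group and multi-group application sharing.
*
* @param response login response used to derive the grouping identifiers.
* @return Array of group IDs, or null if no grouping extractor is configured.
* @throws APIManagementException if the extractor class cannot be loaded, cast or instantiated.
*/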
@Override
public String[] getGroupIds(String response) throws APIManagementException {
String groupingExtractorClass = APIUtil.getGroupingExtractorImplementation();
if (groupingExtractorClass != null) {
try {
LoginPostExecutor groupingExtractor = (LoginPostExecutor) APIUtil.getClassForName
(groupingExtractorClass).newInstance();
// Switch between the multi-group (2.2.0) and single-group (2.1.0) grouping extractor behaviour.
if (APIUtil.isMultiGroupAppSharingEnabled()) {
NewPostLoginExecutor newGroupIdListExtractor = (NewPostLoginExecutor) groupingExtractor;
return newGroupIdListExtractor.getGroupingIdentifierList(response);
} else {
String groupId = groupingExtractor.getGroupingIdentifiers(response);
return new String[] {groupId};
}
} catch (ClassNotFoundException e) {
String msg = groupingExtractorClass + " is not found in runtime";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (ClassCastException e) {
String msg = "Cannot cast " + groupingExtractorClass + " NewPostLoginExecutor";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (IllegalAccessException e) {
String msg = "Error occurred while invocation of getGroupingIdentifier method";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (InstantiationException e) {
String msg = "Error occurred while instantiating " + groupingExtractorClass + " class";
log.error(msg, e);
throw new APIManagementException(msg, e);
}
}
return null;
}
/**
* Returns all applications associated with given subscriber, groupingId and search criteria.
*
* @param subscriber Subscriber
* @param groupingId The groupId to which the applications must belong.
* @param start The start index.
* @param offset The offset.
* @param search The search string.
* @param sortColumn The sort column.
* @param sortOrder The sort order.
* @return Application[] The Applications.
* @throws APIManagementException
*/
@Override
public Application[] getApplicationsWithPagination(Subscriber subscriber, String groupingId, int start , int offset
, String search, String sortColumn, String sortOrder)
throws APIManagementException {
return apiMgtDAO.getApplicationsWithPagination(subscriber, groupingId, start, offset,
search, sortColumn, sortOrder);
}
/**
* Returns all applications associated with given subscriber and groupingId.
*
* @param subscriber The subscriber.
* @param groupingId The groupId to which the applications must belong.
* @return Application[] Array of applications.
* @throws APIManagementException
*/
@Override
public Application[] getApplications(Subscriber subscriber, String groupingId)
throws APIManagementException {
Application[] applications = apiMgtDAO.getApplications(subscriber, groupingId);
for (Application application : applications) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return applications;
}
/**
* Returns all API keys associated with given application id.
*
* @param applicationId The id of the application.
* @return Set<APIKey> Set of API keys of the application.
* @throws APIManagementException
*/
protected Set<APIKey> getApplicationKeys(int applicationId) throws APIManagementException {
Set<APIKey> apiKeys = new HashSet<APIKey>();
APIKey productionKey = getApplicationKey(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
if (productionKey != null) {
apiKeys.add(productionKey);
} else {
productionKey = apiMgtDAO.getKeyStatusOfApplication(APIConstants.API_KEY_TYPE_PRODUCTION, applicationId);
if (productionKey != null) {
productionKey.setType(APIConstants.API_KEY_TYPE_PRODUCTION);
apiKeys.add(productionKey);
}
}
APIKey sandboxKey = getApplicationKey(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
if (sandboxKey != null) {
apiKeys.add(sandboxKey);
} else {
sandboxKey = apiMgtDAO.getKeyStatusOfApplication(APIConstants.API_KEY_TYPE_SANDBOX, applicationId);
if (sandboxKey != null) {
sandboxKey.setType(APIConstants.API_KEY_TYPE_SANDBOX);
apiKeys.add(sandboxKey);
}
}
return apiKeys;
}
/**
* Returns the key associated with given application id and key type.
*
* @param applicationId Id of the Application.
* @param keyType The type of key.
* @return APIKey The key of the application.
* @throws APIManagementException
*/
protected APIKey getApplicationKey(int applicationId, String keyType) throws APIManagementException {
String consumerKey = apiMgtDAO.getConsumerkeyByApplicationIdAndKeyType(String.valueOf(applicationId), keyType);
if (StringUtils.isNotEmpty(consumerKey)) {
String consumerKeyStatus = apiMgtDAO.getKeyStatusOfApplication(keyType, applicationId).getState();
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
OAuthApplicationInfo oAuthApplicationInfo = keyManager.retrieveApplication(consumerKey);
AccessTokenInfo tokenInfo = keyManager.getAccessTokenByConsumerKey(consumerKey);
APIKey apiKey = new APIKey();
apiKey.setConsumerKey(consumerKey);
apiKey.setType(keyType);
apiKey.setState(consumerKeyStatus);
if (oAuthApplicationInfo != null) {
apiKey.setConsumerSecret(oAuthApplicationInfo.getClientSecret());
apiKey.setCallbackUrl(oAuthApplicationInfo.getCallBackURL());
if (oAuthApplicationInfo.getParameter(APIConstants.JSON_GRANT_TYPES) != null) {
apiKey.setGrantTypes(oAuthApplicationInfo.getParameter(APIConstants.JSON_GRANT_TYPES).toString());
}
}
if (tokenInfo != null) {
apiKey.setAccessToken(tokenInfo.getAccessToken());
apiKey.setValidityPeriod(tokenInfo.getValidityPeriod());
apiKey.setTokenScope(getScopeString(tokenInfo.getScopes()));
} else {
if (log.isDebugEnabled()) {
log.debug("Access token does not exist for Consumer Key: " + consumerKey);
}
}
return apiKey;
}
if (log.isDebugEnabled()) {
log.debug("Consumer key does not exist for Application Id: " + applicationId + " Key Type: " + keyType);
}
return null;
}
/**
* Returns a single string containing the provided array of scopes.
*
* @param scopes The array of scopes.
* @return String Single string containing the provided array of scopes.
*/
private String getScopeString(String[] scopes) {
return StringUtils.join(scopes, " ");
}
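/**
* Returns the applications of the given subscriber and group without loading their key details.
*
* @param subscriber the subscriber.
* @param groupingId the group ID to which the applications must belong.
* @return Application[] Array of lightweight applications.
* @throws APIManagementException
*/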
@Override
public Application[] getLightWeightApplications(Subscriber subscriber, String groupingId) throws
APIManagementException {
return apiMgtDAO.getLightWeightApplications(subscriber, groupingId);
}
/**
* @param userId Subscriber name.
* @param applicationName of the Application.
* @param tokenType Token type (PRODUCTION | SANDBOX)
* @param callbackUrl callback URL
* @param allowedDomains allowedDomains for token.
* @param validityTime validity time period.
* @param groupingId the group ID to which the application belongs.
* @param jsonString additional OAuth application parameters as a JSON string.
* @param tokenScope Scopes for the requested tokens.
* @return OAuthApplicationInfo of the updated OAuth application.
* @throws APIManagementException
*/
@Override
public OAuthApplicationInfo updateAuthClient(String userId, String applicationName,
String tokenType,
String callbackUrl, String[] allowedDomains,
String validityTime,
String tokenScope,
String groupingId,
String jsonString) throws APIManagementException {
boolean tenantFlowStarted = false;
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
tenantFlowStarted = true;
}
Application application = ApplicationUtils.retrieveApplication(applicationName, userId, groupingId);
final String subscriberName = application.getSubscriber().getName();
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = subscriberName.equalsIgnoreCase(userId);
} else {
isUserAppOwner = subscriberName.equals(userId);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + userId + ", attempted to update OAuth application " +
"owned by: " + subscriberName);
}
//Create OauthAppRequest object by passing json String.
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, null, callbackUrl,
tokenScope, jsonString, application.getTokenType());
oauthAppRequest.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
String consumerKey = apiMgtDAO.getConsumerKeyForApplicationKeyType(applicationName, userId, tokenType,
groupingId);
oauthAppRequest.getOAuthApplicationInfo().setClientId(consumerKey);
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//call update method.
OAuthApplicationInfo updatedAppInfo = keyManager.updateApplication(oauthAppRequest);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, updatedAppInfo.getClientName());
appLogObject.put("Updated Oauth app with Call back URL", callbackUrl);
appLogObject.put("Updated Oauth app with grant types", jsonString);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return updatedAppInfo;
} finally {
if (tenantFlowStarted) {
endTenantFlow();
}
}
}
/**
* @param userId Subscriber name.
* @param applicationName of the Application.
* @param applicationId of the Application.
* @param tokenType Token type (PRODUCTION | SANDBOX)
* @param callbackUrl callback URL
* @param allowedDomains allowedDomains for token.
* @param validityTime validity time period.
* @param groupingId the group ID to which the application belongs.
* @param jsonString additional OAuth application parameters as a JSON string.
* @param tokenScope Scopes for the requested tokens.
* @return OAuthApplicationInfo of the updated OAuth application.
* @throws APIManagementException
*/
@Override
public OAuthApplicationInfo updateAuthClientByAppId(String userId, String applicationName, int applicationId,
String tokenType, String callbackUrl, String[] allowedDomains, String validityTime, String tokenScope,
String groupingId, String jsonString) throws APIManagementException {
boolean tenantFlowStarted = false;
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
tenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
Application application = ApplicationUtils.retrieveApplicationById(applicationId);
//Create OauthAppRequest object by passing json String.
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, null, callbackUrl,
tokenScope, jsonString, application.getTokenType());
oauthAppRequest.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
String consumerKey = apiMgtDAO.getConsumerKeyForApplicationKeyType(applicationId, userId, tokenType,
groupingId);
oauthAppRequest.getOAuthApplicationInfo().setClientId(consumerKey);
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//call update method.
OAuthApplicationInfo updatedAppInfo = keyManager.updateApplication(oauthAppRequest);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, updatedAppInfo.getClientName());
appLogObject.put("Updated Oauth app with Call back URL", callbackUrl);
appLogObject.put("Updated Oauth app with grant types", jsonString);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return updatedAppInfo;
} finally {
if (tenantFlowStarted) {
endTenantFlow();
}
}
}
/**
* Deletes the OAuth application identified by the given consumer key, along with its application key
* mapping and registration entries.
*
* @param consumerKey consumer key of the OAuth application to be deleted.
* @throws APIManagementException
*/
@Override
public void deleteOAuthApplication(String consumerKey) throws APIManagementException {
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//delete oAuthApplication by calling key manager implementation
keyManager.deleteApplication(consumerKey);
Map<String, String> applicationIdAndTokenTypeMap =
apiMgtDAO.getApplicationIdAndTokenTypeByConsumerKey(consumerKey);
if (applicationIdAndTokenTypeMap != null) {
String applicationId = applicationIdAndTokenTypeMap.get("application_id");
String tokenType = applicationIdAndTokenTypeMap.get("token_type");
if (applicationId != null && tokenType != null) {
apiMgtDAO.deleteApplicationKeyMappingByConsumerKey(consumerKey);
apiMgtDAO.deleteApplicationRegistration(applicationId, tokenType);
}
}
}
@Override
public Application[] getApplicationsByOwner(String userId) throws APIManagementException {
return apiMgtDAO.getApplicationsByOwner(userId);
}
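/**
* Transfers ownership of the given application to another user by copying the application-specific roles
* of the current owner to the new owner and updating the subscriber record.
*
* @param userId user name of the new application owner.
* @param application the application whose ownership is changed.
* @return true if the owner was updated successfully, false otherwise.
* @throws APIManagementException if role retrieval or the remote user admin call fails.
*/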
@Override
public boolean updateApplicationOwner(String userId, Application application) throws APIManagementException {
boolean isAppUpdated = false;
try {
RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(MultitenantUtils.getTenantDomain(username));
UserStoreManager userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager();
String oldUserName = application.getSubscriber().getName();
String[] oldUserRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername
(oldUserName));
String[] newUserRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername
(userId));
List<String> roleList = new ArrayList<String>();
roleList.addAll(Arrays.asList(newUserRoles));
for (String role : oldUserRoles) {
if (role.contains(application.getName())) {
roleList.add(role);
}
}
String[] roleArr = roleList.toArray(new String[roleList.size()]);
APIManagerConfiguration config = getAPIManagerConfiguration();
String serverURL = config.getFirstProperty(APIConstants.AUTH_MANAGER_URL) + "UserAdmin";
String adminUsername = config.getFirstProperty(APIConstants.AUTH_MANAGER_USERNAME);
String adminPassword = config.getFirstProperty(APIConstants.AUTH_MANAGER_PASSWORD);
UserAdminStub userAdminStub = new UserAdminStub(serverURL);
CarbonUtils.setBasicAccessSecurityHeaders(adminUsername, adminPassword, userAdminStub._getServiceClient());
userAdminStub.updateRolesOfUser(userId, roleArr);
isAppUpdated = true;
} catch (org.wso2.carbon.user.api.UserStoreException e) {
handleException("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
} catch (RemoteException e) {
handleException("Server couldn't establish connection with auth manager ", e);
} catch (UserAdminUserAdminException e) {
handleException("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
}
if (isAppUpdated) {
isAppUpdated = apiMgtDAO.updateApplicationOwner(userId, application);
}
//TODO: update the OAuth application once the OAuth component supports updating the owner
return isAppUpdated;
}
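/**
* Resumes the workflow identified by its external reference and completes it with the given status.
* Expects args to contain the workflow reference, the new status (APPROVED or REJECTED) and an optional
* description.
*
* @param args workflow reference, status and optional description.
* @return JSONObject containing an error flag, a status code and a message describing the outcome.
*/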
public JSONObject resumeWorkflow(Object[] args) {
JSONObject row = new JSONObject();
if (args != null && APIUtil.isStringArray(args)) {
String workflowReference = (String) args[0];
String status = (String) args[1];
String description = null;
if (args.length > 2 && args[2] != null) {
description = (String) args[2];
}
boolean isTenantFlowStarted = false;
try {
// if (workflowReference != null) {
WorkflowDTO workflowDTO = apiMgtDAO.retrieveWorkflow(workflowReference);
if (workflowDTO == null) {
log.error("Could not find workflow for reference " + workflowReference);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", "Could not find workflow for reference " + workflowReference);
return row;
}
String tenantDomain = workflowDTO.getTenantDomain();
if (tenantDomain != null && !org.wso2.carbon.utils.multitenancy.MultitenantConstants
.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
workflowDTO.setWorkflowDescription(description);
workflowDTO.setStatus(WorkflowStatus.valueOf(status));
String workflowType = workflowDTO.getWorkflowType();
WorkflowExecutor workflowExecutor;
try {
workflowExecutor = getWorkflowExecutor(workflowType);
workflowExecutor.complete(workflowDTO);
} catch (WorkflowException e) {
throw new APIManagementException(e);
}
row.put("error", Boolean.FALSE);
row.put("statusCode", 200);
row.put("message", "Invoked workflow completion successfully.");
// }
} catch (IllegalArgumentException e) {
String msg = "Illegal argument provided. Valid values for status are APPROVED and REJECTED.";
log.error(msg, e);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", msg);
} catch (APIManagementException e) {
String msg = "Error while resuming the workflow. ";
log.error(msg, e);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", msg + e.getMessage());
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
return row;
}
protected void endTenantFlow() {
PrivilegedCarbonContext.endTenantFlow();
}
protected boolean startTenantFlowForTenantDomain(String tenantDomain) {
boolean isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
return isTenantFlowStarted;
}
/**
* Returns a workflow executor
*
* @param workflowType Workflow executor type
* @return WorkflowExecutor of given type
* @throws WorkflowException if an error occurred while getting WorkflowExecutor
*/
protected WorkflowExecutor getWorkflowExecutor(String workflowType) throws WorkflowException {
return WorkflowExecutorFactory.getInstance().getWorkflowExecutor(workflowType);
}
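/**
* Checks whether monetization is enabled in the tenant configuration of the given tenant domain.
*
* @param tenantDomain tenant domain to check.
* @return true if monetization is enabled for the tenant, false otherwise.
* @throws APIManagementException if the tenant configuration cannot be read or parsed.
*/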
@Override
public boolean isMonetizationEnabled(String tenantDomain) throws APIManagementException {
JSONObject apiTenantConfig = null;
try {
String content = apimRegistryService.getConfigRegistryResourceContent(tenantDomain, APIConstants.API_TENANT_CONF_LOCATION);
if (content != null) {
JSONParser parser = new JSONParser();
apiTenantConfig = (JSONObject) parser.parse(content);
}
} catch (UserStoreException e) {
handleException("UserStoreException thrown when getting API tenant config from registry", e);
} catch (RegistryException e) {
handleException("RegistryException thrown when getting API tenant config from registry", e);
} catch (ParseException e) {
handleException("ParseException thrown when passing API tenant config from registry", e);
}
return getTenantConfigValue(tenantDomain, apiTenantConfig, APIConstants.API_TENANT_CONF_ENABLE_MONITZATION_KEY);
}
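/**
* Reads a boolean value from the given tenant configuration.
*
* @param tenantDomain tenant domain the configuration belongs to (used for error reporting).
* @param apiTenantConfig parsed tenant configuration, may be null.
* @param configKey key of the configuration value to read.
* @return the boolean value of the key, or false if the configuration is null.
* @throws APIManagementException if the configuration exists but the key is missing.
*/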
private boolean getTenantConfigValue(String tenantDomain, JSONObject apiTenantConfig, String configKey) throws APIManagementException {
if (apiTenantConfig != null) {
Object value = apiTenantConfig.get(configKey);
if (value != null) {
return Boolean.parseBoolean(value.toString());
}
else {
throw new APIManagementException(configKey + " config does not exist for tenant " + tenantDomain);
}
}
return false;
}
/**
* Builds the query used to restrict results to the current user's role list.
*
* @return the query with the user role list.
* @throws APIManagementException API Management Exception.
*/
private String getUserRoleListQuery() throws APIManagementException {
StringBuilder rolesQuery = new StringBuilder();
rolesQuery.append('(');
rolesQuery.append(APIConstants.NULL_USER_ROLE_LIST);
String[] userRoles = APIUtil.getListOfRoles((userNameWithoutChange != null)? userNameWithoutChange: username);
if (userRoles != null) {
for (String userRole : userRoles) {
rolesQuery.append(" OR ");
rolesQuery.append(ClientUtils.escapeQueryChars(APIUtil.sanitizeUserRole(userRole.toLowerCase())));
}
}
rolesQuery.append(")");
if(log.isDebugEnabled()) {
log.debug("User role list solr query " + APIConstants.STORE_VIEW_ROLES + "=" + rolesQuery.toString());
}
return APIConstants.STORE_VIEW_ROLES + "=" + rolesQuery.toString();
}
/**
* To get the current user's role list.
*
* @return user role list.
* @throws APIManagementException API Management Exception.
*/
private List<String> getUserRoleList() throws APIManagementException {
List<String> userRoleList;
if (userNameWithoutChange == null) {
userRoleList = new ArrayList<String>() {{
add(APIConstants.NULL_USER_ROLE_LIST);
}};
} else {
userRoleList = new ArrayList<String>(Arrays.asList(APIUtil.getListOfRoles(userNameWithoutChange)));
}
return userRoleList;
}
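/**
* Restricts the given search query by the current user's roles when publisher access control is enabled
* and the user does not have admin permission.
*
* @param searchQuery original search query.
* @return the search query combined with the user role criteria, or the original query when no
*         restriction applies.
* @throws APIManagementException
*/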
@Override
protected String getSearchQuery(String searchQuery) throws APIManagementException {
if (!isAccessControlRestrictionEnabled || (userNameWithoutChange != null &&
APIUtil.hasPermission(userNameWithoutChange, APIConstants.Permissions.APIM_ADMIN))) {
return searchQuery;
}
String criteria = getUserRoleListQuery();
if (searchQuery != null && !searchQuery.trim().isEmpty()) {
criteria = criteria + "&" + searchQuery;
}
return criteria;
}
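/**
* Retrieves the WSDL document stored at the given registry resource path and rewrites its endpoints
* for the given gateway environment before returning it as a JSON string.
*
* @param username requesting user; defaults to the anonymous user when null.
* @param tenantDomain tenant domain of the resource; defaults to the super tenant when null.
* @param resourceUrl registry path of the WSDL resource.
* @param environmentDetails map containing the gateway environment name and type.
* @param apiDetails map containing the API name, version and provider.
* @return JSON string containing the content type, resource name and updated WSDL content.
* @throws APIManagementException if the WSDL resource cannot be found or processed.
*/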
@Override
public String getWSDLDocument(String username, String tenantDomain, String resourceUrl,
Map environmentDetails, Map apiDetails) throws APIManagementException {
if (username == null) {
username = APIConstants.END_USER_ANONYMOUS;
}
if (tenantDomain == null) {
tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
Map<String, Object> docResourceMap = APIUtil.getDocument(username, resourceUrl, tenantDomain);
String wsdlContent = "";
if (log.isDebugEnabled()) {
log.debug("WSDL document resource availability: " + docResourceMap.isEmpty());
}
if (!docResourceMap.isEmpty()) {
try {
ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream();
IOUtils.copy((InputStream) docResourceMap.get("Data"), arrayOutputStream);
String apiName = (String) apiDetails.get(API_NAME);
String apiVersion = (String) apiDetails.get(API_VERSION);
String apiProvider = (String) apiDetails.get(API_PROVIDER);
String environmentName = (String) environmentDetails.get(ENVIRONMENT_NAME);
String environmentType = (String) environmentDetails.get(ENVIRONMENT_TYPE);
if (log.isDebugEnabled()) {
log.debug("Published SOAP api gateway environment name: " + environmentName + " environment type: "
+ environmentType);
}
byte[] updatedWSDLContent = this.getUpdatedWSDLByEnvironment(resourceUrl,
arrayOutputStream.toByteArray(), environmentName, environmentType, apiName, apiVersion, apiProvider);
wsdlContent = new String(updatedWSDLContent);
} catch (IOException e) {
handleException("Error occurred while copying wsdl content into byte array stream for resource: "
+ resourceUrl, e);
}
} else {
handleException("No wsdl resource found for resource path: " + resourceUrl);
}
JSONObject data = new JSONObject();
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE,
docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE));
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME,
docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME));
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_DATA, wsdlContent);
if (log.isDebugEnabled()) {
log.debug("Updated wsdl content details for wsdl resource: " + docResourceMap.get("name") + " is " +
data.toJSONString());
}
return data.toJSONString();
}
/**
* Checks whether the currently logged-in user is authorized to access the given API. If the user is not
* authorized, an exception is thrown.
*
* @param identifier API identifier
* @throws APIManagementException APIManagementException
*/
protected void checkAccessControlPermission(APIIdentifier identifier) throws APIManagementException {
if (identifier == null || !isAccessControlRestrictionEnabled) {
if (!isAccessControlRestrictionEnabled && log.isDebugEnabled() && identifier != null) {
log.debug(
"Publisher access control restriction is not enabled. Hence the API " + identifier.getApiName()
+ " should not be checked for further permission. Registry permission check "
+ "is sufficient");
}
return;
}
String apiPath = APIUtil.getAPIPath(identifier);
Registry registry;
try {
// Need user name with tenant domain to get correct domain name from
// MultitenantUtils.getTenantDomain(username)
String userNameWithTenantDomain = (userNameWithoutChange != null) ? userNameWithoutChange : username;
String apiTenantDomain = getTenantDomain(identifier);
int apiTenantId = getTenantManager().getTenantId(apiTenantDomain);
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(apiTenantDomain)) {
APIUtil.loadTenantRegistry(apiTenantId);
}
if (this.tenantDomain == null || !this.tenantDomain.equals(apiTenantDomain)) { //cross tenant scenario
registry = getRegistryService().getGovernanceUserRegistry(
getTenantAwareUsername(APIUtil.replaceEmailDomainBack(identifier.getProviderName())),
apiTenantId);
} else {
registry = this.registry;
}
Resource apiResource = registry.get(apiPath);
String accessControlProperty = apiResource.getProperty(APIConstants.ACCESS_CONTROL);
if (accessControlProperty == null || accessControlProperty.trim().isEmpty() || accessControlProperty
.equalsIgnoreCase(APIConstants.NO_ACCESS_CONTROL)) {
if (log.isDebugEnabled()) {
log.debug("API in the path " + apiPath + " does not have any access control restriction");
}
return;
}
if (APIUtil.hasPermission(userNameWithTenantDomain, APIConstants.Permissions.APIM_ADMIN)) {
return;
}
String storeVisibilityRoles = apiResource.getProperty(APIConstants.STORE_VIEW_ROLES);
if (storeVisibilityRoles != null && !storeVisibilityRoles.trim().isEmpty()) {
String[] storeVisibilityRoleList = storeVisibilityRoles.split(",");
if (log.isDebugEnabled()) {
log.debug("API has restricted access to users with the roles : " + Arrays
.toString(storeVisibilityRoleList));
}
String[] userRoleList = APIUtil.getListOfRoles(userNameWithTenantDomain);
if (log.isDebugEnabled()) {
log.debug("User " + username + " has roles " + Arrays.toString(userRoleList));
}
for (String role : storeVisibilityRoleList) {
role = role.trim();
if (role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST) || APIUtil
.compareRoleList(userRoleList, role)) {
return;
}
}
if (log.isDebugEnabled()) {
log.debug("API " + identifier + " cannot be accessed by user '" + username + "'. It "
+ "has a store visibility restriction");
}
throw new APIManagementException(
APIConstants.UN_AUTHORIZED_ERROR_MESSAGE + " view the API " + identifier);
}
} catch (RegistryException e) {
throw new APIManagementException(
"Registry Exception while trying to check the store visibility restriction of API " + identifier
.getApiName(), e);
} catch (org.wso2.carbon.user.api.UserStoreException e) {
String msg = "Failed to get API from : " + apiPath;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
}
/**
* This method is used to get the WSDL updated with the endpoints of the gateway environment in which the API
* is published
*
* @param wsdlResourcePath registry resource path to the wsdl
* @param wsdlContent wsdl resource content as byte array
* @param environmentName gateway environment name
* @param environmentType gateway environment type
* @param apiName name of the API
* @param apiVersion version of the API
* @param apiProvider provider of the API
* @return updated wsdl content with environment endpoints
* @throws APIManagementException
*/
private byte[] getUpdatedWSDLByEnvironment(String wsdlResourcePath, byte[] wsdlContent, String environmentName,
String environmentType, String apiName, String apiVersion, String apiProvider) throws APIManagementException {
APIMWSDLReader apimwsdlReader = new APIMWSDLReader(wsdlResourcePath);
Definition definition = apimwsdlReader.getWSDLDefinitionFromByteContent(wsdlContent, false);
byte[] updatedWSDLContent = null;
boolean isTenantFlowStarted = false;
try {
String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(apiProvider));
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
int tenantId;
UserRegistry registry;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
APIUtil.loadTenantRegistry(tenantId);
registry = registryService.getGovernanceSystemRegistry(tenantId);
API api = null;
if (!StringUtils.isEmpty(apiName) && !StringUtils.isEmpty(apiVersion)) {
APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(apiProvider), apiName, apiVersion);
if (log.isDebugEnabled()) {
log.debug("Api identifier for the soap api artifact: " + apiIdentifier + "for api name: "
+ apiName + ", version: " + apiVersion);
}
GenericArtifact apiArtifact = APIUtil.getAPIArtifact(apiIdentifier, registry);
api = APIUtil.getAPI(apiArtifact);
if (log.isDebugEnabled()) {
if (api != null) {
log.debug(
"Api context for the artifact with id:" + api.getId() + " is " + api.getContext());
} else {
log.debug("Api does not exist for api name: " + apiIdentifier.getApiName());
}
}
} else {
handleException("Artifact does not exist in the registry for api name: " + apiName +
" and version: " + apiVersion);
}
if (api != null) {
try {
apimwsdlReader.setServiceDefinition(definition, api, environmentName, environmentType);
if (log.isDebugEnabled()) {
log.debug("Soap api with context:" + api.getContext() + " in " + environmentName
+ " with environment type" + environmentType);
}
updatedWSDLContent = apimwsdlReader.getWSDL(definition);
} catch (APIManagementException e) {
handleException("Error occurred while processing the wsdl for api: " + api.getId());
}
} else {
handleException("Error while getting API object for wsdl artifact");
}
} catch (UserStoreException e) {
handleException("Error while reading tenant information", e);
} catch (RegistryException e) {
handleException("Error when create registry instance", e);
}
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return updatedWSDLContent;
}
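    /*
     * Illustrative call sketch for the method above. The literal argument values are
     * assumptions used only to show the expected shape of the parameters:
     *
     *   byte[] updatedWsdl = getUpdatedWSDLByEnvironment(wsdlResourcePath, wsdlContent,
     *           "Production and Sandbox", "hybrid", "PizzaShackAPI", "1.0.0", "admin");
     */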
    /**
     * This method is used to get the keys of the custom application attributes configured by the user.
     *
     * @param userId user name of the logged in user
     * @return JSONArray of JSONObjects containing the attribute keys
     * @throws APIManagementException
     */
public JSONArray getAppAttributesFromConfig(String userId) throws APIManagementException {
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
int tenantId = 0;
try {
tenantId = getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Error in getting tenantId of " + tenantDomain, e);
}
JSONArray applicationAttributes = null;
JSONObject applicationConfig = APIUtil.getAppAttributeKeysFromRegistry(tenantId);
try {
if (applicationConfig != null) {
applicationAttributes = (JSONArray) applicationConfig.get(APIConstants.ApplicationAttributes.ATTRIBUTES);
} else {
APIManagerConfiguration configuration = getAPIManagerConfiguration();
applicationAttributes = configuration.getApplicationAttributes();
}
} catch (NullPointerException e){
handleException("Error in reading configuration " + e.getMessage(), e);
}
return applicationAttributes;
}
    /**
     * This method is used to validate the keys of the custom application attributes configured by the user.
     *
     * @param application application whose custom attributes are validated
     * @param userId user name of the logged in user
     * @throws APIManagementException
     */
public void checkAppAttributes(Application application, String userId) throws APIManagementException {
JSONArray applicationAttributesFromConfig = getAppAttributesFromConfig(userId);
Map<String, String> applicationAttributes = application.getApplicationAttributes();
List attributeKeys = new ArrayList<String>();
int applicationId = application.getId();
int tenantId = 0;
Map<String, String> newApplicationAttributes = new HashMap<>();
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
try {
tenantId = getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Error in getting tenantId of " + tenantDomain, e);
}
for (Object object : applicationAttributesFromConfig) {
JSONObject attribute = (JSONObject) object;
attributeKeys.add(attribute.get(APIConstants.ApplicationAttributes.ATTRIBUTE));
}
for (Object key : applicationAttributes.keySet()) {
if (!attributeKeys.contains(key)) {
apiMgtDAO.deleteApplicationAttributes((String) key, applicationId);
if (log.isDebugEnabled()) {
log.debug("Removing " + key + "from application - " + application.getName());
}
}
}
for (Object key : attributeKeys) {
if (!applicationAttributes.keySet().contains(key)) {
newApplicationAttributes.put((String) key, "");
}
}
apiMgtDAO.addApplicationAttributes(newApplicationAttributes, applicationId, tenantId);
}
}
| Adding null check for getApplicationById methods
| components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIConsumerImpl.java | Adding null check for getApplicationById methods |
|
Java | apache-2.0 | d0ec7c8e3ae07d63a8e066fdf97bc25a4db130b3 | 0 | quaddy-services/projekt-task-capturing,quaddy-services/projekt-task-capturing | package de.quaddy_services.ptc.enterprise;
public class CommentDuration implements Comparable<CommentDuration> {
private Long duration;
private String comment;
public CommentDuration(Long aDuration, String aComment) {
super();
duration = aDuration;
comment = aComment.trim();
}
public Long getDuration() {
return duration;
}
public void setDuration(Long aDuration) {
duration = aDuration;
}
public String getComment() {
return comment;
}
public void setComment(String aComment) {
comment = aComment;
}
@Override
public int compareTo(CommentDuration aO) {
int tempCompareTo = getDuration().compareTo(aO.getDuration());
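		// The comparison result is negated below to sort longer durations first; the
		// Integer extremes are handled explicitly because Integer.MIN_VALUE cannot be
		// negated without overflow.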
if (Integer.MIN_VALUE == tempCompareTo) {
return 1;
} else if (Integer.MAX_VALUE == tempCompareTo) {
return -1;
}
return -tempCompareTo;
}
}
| src/main/java/de/quaddy_services/ptc/enterprise/CommentDuration.java | package de.quaddy_services.ptc.enterprise;
public class CommentDuration implements Comparable<CommentDuration> {
private Long duration;
private String comment;
public CommentDuration(Long aDuration, String aComment) {
super();
duration = aDuration;
comment = aComment.trim();
}
public Long getDuration() {
return duration;
}
public void setDuration(Long aDuration) {
duration = aDuration;
}
public String getComment() {
return comment;
}
public void setComment(String aComment) {
comment = aComment;
}
@Override
public int compareTo(CommentDuration aO) {
int tempCompareTo = getDuration().compareTo(aO.getDuration());
if (Integer.MIN_VALUE == tempCompareTo) {
return Integer.MAX_VALUE;
} else if (Integer.MAX_VALUE == tempCompareTo) {
return Integer.MIN_VALUE;
}
return -tempCompareTo;
}
}
| sonar (Simply return -1) | src/main/java/de/quaddy_services/ptc/enterprise/CommentDuration.java | sonar (Simply return -1) |
|
Java | apache-2.0 | ad44ae35b3b919711fa09fa222d6373437f09f7e | 0 | apache/lenya,apache/lenya,apache/lenya,apache/lenya | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.lenya.cms.site.usecases;
import org.apache.lenya.cms.publication.Document;
import org.apache.lenya.cms.publication.DocumentManager;
import org.apache.lenya.cms.publication.Publication;
import org.apache.lenya.cms.publication.util.DocumentHelper;
import org.apache.lenya.cms.repository.Node;
import org.apache.lenya.cms.usecase.DocumentUsecase;
import org.apache.lenya.cms.usecase.UsecaseException;
/**
* Delete a language version.
*
* @version $Id$
*/
public class DeleteLanguage extends DocumentUsecase {
/**
* @see org.apache.lenya.cms.usecase.AbstractUsecase#doCheckPreconditions()
*/
protected void doCheckPreconditions() throws Exception {
super.doCheckPreconditions();
if (hasErrors()) {
return;
}
if (!getSourceDocument().getArea().equals(Publication.AUTHORING_AREA)) {
addErrorMessage("This usecase can only be invoked in the authoring area!");
} else if (getSourceDocument().getLanguages().length == 1) {
addErrorMessage("The last language version cannot be removed.");
}
}
/**
* @see org.apache.lenya.cms.usecase.AbstractUsecase#getNodesToLock()
*/
protected Node[] getNodesToLock() throws UsecaseException {
Node docNode = getSourceDocument().getRepositoryNode();
Node siteNode = getSourceDocument().area().getSite().getRepositoryNode();
Node[] nodes = { docNode, siteNode };
return nodes;
}
/**
* @see org.apache.lenya.cms.usecase.AbstractUsecase#doExecute()
*/
protected void doExecute() throws Exception {
super.doExecute();
Document document = getSourceDocument();
document.getLink().delete();
document.delete();
setTargetDocument(DocumentHelper.getExistingLanguageVersion(document));
}
}
| src/modules-core/sitemanagement/java/src/org/apache/lenya/cms/site/usecases/DeleteLanguage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.lenya.cms.site.usecases;
import org.apache.lenya.cms.publication.Document;
import org.apache.lenya.cms.publication.DocumentManager;
import org.apache.lenya.cms.publication.Publication;
import org.apache.lenya.cms.publication.util.DocumentHelper;
import org.apache.lenya.cms.repository.Node;
import org.apache.lenya.cms.usecase.DocumentUsecase;
import org.apache.lenya.cms.usecase.UsecaseException;
/**
* Delete a language version.
*
* @version $Id$
*/
public class DeleteLanguage extends DocumentUsecase {
/**
* @see org.apache.lenya.cms.usecase.AbstractUsecase#doCheckPreconditions()
*/
protected void doCheckPreconditions() throws Exception {
super.doCheckPreconditions();
if (hasErrors()) {
return;
}
if (!getSourceDocument().getArea().equals(Publication.AUTHORING_AREA)) {
addErrorMessage("This usecase can only be invoked in the authoring area!");
} else if (getSourceDocument().getLanguages().length == 1) {
addErrorMessage("The last language version cannot be removed.");
}
}
/**
* @see org.apache.lenya.cms.usecase.AbstractUsecase#getNodesToLock()
*/
protected Node[] getNodesToLock() throws UsecaseException {
Node docNode = getSourceDocument().getRepositoryNode();
Node siteNode = getSourceDocument().area().getSite().getRepositoryNode();
Node[] nodes = { docNode, siteNode };
return nodes;
}
/**
* @see org.apache.lenya.cms.usecase.AbstractUsecase#doExecute()
*/
protected void doExecute() throws Exception {
super.doExecute();
Document document = getSourceDocument();
DocumentManager documentManager = null;
try {
documentManager = (DocumentManager) this.manager.lookup(DocumentManager.ROLE);
documentManager.delete(document);
} finally {
if (documentManager != null) {
this.manager.release(documentManager);
}
}
setTargetDocument(DocumentHelper.getExistingLanguageVersion(document));
}
}
| Simplify document deletion in DeleteLanguage usecase
git-svn-id: c334bb69c16d150e1b06e84516f7aa90b3181ca2@534330 13f79535-47bb-0310-9956-ffa450edef68
| src/modules-core/sitemanagement/java/src/org/apache/lenya/cms/site/usecases/DeleteLanguage.java | Simplify document deletion in DeleteLanguage usecase |
|
Java | apache-2.0 | 6922e116554b797421cda14cf27135f7de363c09 | 0 | epall/selenium,epall/selenium,epall/selenium,epall/selenium,epall/selenium,epall/selenium,epall/selenium,epall/selenium | package org.openqa.selenium.chrome;
import org.openqa.selenium.By;
import org.openqa.selenium.RenderedWebElement;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.DriverCommand;
import org.openqa.selenium.internal.FindsByClassName;
import org.openqa.selenium.internal.FindsByCssSelector;
import org.openqa.selenium.internal.FindsById;
import org.openqa.selenium.internal.FindsByLinkText;
import org.openqa.selenium.internal.FindsByName;
import org.openqa.selenium.internal.FindsByTagName;
import org.openqa.selenium.internal.FindsByXPath;
import org.openqa.selenium.internal.Locatable;
import org.openqa.selenium.internal.WrapsElement;
import java.awt.Dimension;
import java.awt.Point;
import java.util.List;
public class ChromeWebElement implements RenderedWebElement, Locatable,
FindsByXPath, FindsByLinkText, FindsById, FindsByName, FindsByTagName, FindsByClassName, FindsByCssSelector {
private final ChromeDriver parent;
private final String elementId;
public ChromeWebElement(ChromeDriver parent, String elementId) {
this.parent = parent;
this.elementId = elementId;
}
String getElementId() {
return elementId;
}
ChromeResponse execute(DriverCommand driverCommand, Object... parameters) {
return parent.execute(driverCommand, parameters);
}
public void dragAndDropBy(int moveRightBy, int moveDownBy) {
throw new UnsupportedOperationException("Not yet supported in Chrome");
}
public void dragAndDropOn(RenderedWebElement element) {
throw new UnsupportedOperationException("Not yet supported in Chrome");
}
public Point getLocation() {
return (Point)parent.execute(DriverCommand.GET_ELEMENT_LOCATION, this).getValue();
}
public Dimension getSize() {
return (Dimension)parent.execute(DriverCommand.GET_ELEMENT_SIZE, this).getValue();
}
public String getValueOfCssProperty(String propertyName) {
return parent.execute(DriverCommand.GET_ELEMENT_VALUE_OF_CSS_PROPERTY, this, propertyName)
.getValue().toString();
}
public boolean isDisplayed() {
ChromeResponse r = execute(DriverCommand.IS_ELEMENT_DISPLAYED, this);
return (Boolean)r.getValue();
}
public void clear() {
parent.execute(DriverCommand.CLEAR_ELEMENT, this);
}
public void click() {
parent.execute(DriverCommand.CLICK_ELEMENT, this);
}
public WebElement findElement(By by) {
return by.findElement(this);
}
public List<WebElement> findElements(By by) {
return by.findElements(this);
}
public String getAttribute(String name) {
Object value = execute(DriverCommand.GET_ELEMENT_ATTRIBUTE, this, name).getValue();
return (value == null) ? null : value.toString();
}
public String getTagName() {
return execute(DriverCommand.GET_ELEMENT_TAG_NAME, this).getValue().toString();
}
public String getText() {
return execute(DriverCommand.GET_ELEMENT_TEXT, this).getValue().toString();
}
public String getValue() {
return execute(DriverCommand.GET_ELEMENT_VALUE, this).getValue().toString();
}
public boolean isEnabled() {
return Boolean.parseBoolean(execute(DriverCommand.IS_ELEMENT_ENABLED, this).getValue().toString());
}
public boolean isSelected() {
return Boolean.parseBoolean(execute(DriverCommand.IS_ELEMENT_SELECTED, this)
.getValue().toString());
}
public void sendKeys(CharSequence... keysToSend) {
StringBuilder builder = new StringBuilder();
for (CharSequence seq : keysToSend) {
builder.append(seq);
}
execute(DriverCommand.SEND_KEYS_TO_ELEMENT, this, builder.toString());
}
public void setSelected() {
execute(DriverCommand.SET_ELEMENT_SELECTED, this);
}
public void submit() {
execute(DriverCommand.SUBMIT_ELEMENT, this);
}
public boolean toggle() {
return Boolean.parseBoolean(execute(DriverCommand.TOGGLE_ELEMENT, this)
.getValue().toString());
}
public Point getLocationOnScreenOnceScrolledIntoView() {
return (Point)parent.execute(DriverCommand.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW, this).getValue();
}
public WebElement findElementByXPath(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "xpath", using));
}
public List<WebElement> findElementsByXPath(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "xpath", using));
}
public WebElement findElementByLinkText(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "link text", using));
}
public WebElement findElementByPartialLinkText(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "partial link text", using));
}
public List<WebElement> findElementsByLinkText(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "link text", using));
}
public List<WebElement> findElementsByPartialLinkText(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "partial link text", using));
}
public WebElement findElementById(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "id", using));
}
public List<WebElement> findElementsById(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "id", using));
}
public WebElement findElementByName(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "name", using));
}
public List<WebElement> findElementsByName(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "name", using));
}
public WebElement findElementByTagName(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "tag name", using));
}
public List<WebElement> findElementsByTagName(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "tag name", using));
}
public WebElement findElementByClassName(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "class name", using));
}
public List<WebElement> findElementsByClassName(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "class name", using));
}
public WebElement findElementByCssSelector(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "css", using));
}
public List<WebElement> findElementsByCssSelector(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "css", using));
}
public void hover() {
//Relies on the user not moving the mouse after the hover moves it into place
execute(DriverCommand.HOVER_OVER_ELEMENT, this);
}
@Override
public int hashCode() {
return elementId.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof WebElement)) {
return false;
}
WebElement other = (WebElement) obj;
if (other instanceof WrapsElement) {
other = ((WrapsElement) obj).getWrappedElement();
}
if (!(other instanceof ChromeWebElement)) {
return false;
}
return elementId.equals(((ChromeWebElement)other).elementId);
}
}
| chrome/src/java/org/openqa/selenium/chrome/ChromeWebElement.java | package org.openqa.selenium.chrome;
import org.openqa.selenium.By;
import org.openqa.selenium.RenderedWebElement;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.DriverCommand;
import org.openqa.selenium.internal.FindsByClassName;
import org.openqa.selenium.internal.FindsByCssSelector;
import org.openqa.selenium.internal.FindsById;
import org.openqa.selenium.internal.FindsByLinkText;
import org.openqa.selenium.internal.FindsByName;
import org.openqa.selenium.internal.FindsByTagName;
import org.openqa.selenium.internal.FindsByXPath;
import org.openqa.selenium.internal.Locatable;
import org.openqa.selenium.internal.WrapsElement;
import java.awt.Dimension;
import java.awt.Point;
import java.util.List;
public class ChromeWebElement implements RenderedWebElement, Locatable,
FindsByXPath, FindsByLinkText, FindsById, FindsByName, FindsByTagName, FindsByClassName, FindsByCssSelector {
private final ChromeDriver parent;
private final String elementId;
public ChromeWebElement(ChromeDriver parent, String elementId) {
this.parent = parent;
this.elementId = elementId;
}
String getElementId() {
return elementId;
}
ChromeResponse execute(DriverCommand driverCommand, Object... parameters) {
return parent.execute(driverCommand, parameters);
}
public void dragAndDropBy(int moveRightBy, int moveDownBy) {
throw new UnsupportedOperationException("Not yet supported in Chrome");
}
public void dragAndDropOn(RenderedWebElement element) {
throw new UnsupportedOperationException("Not yet supported in Chrome");
}
public Point getLocation() {
return (Point)parent.execute(DriverCommand.GET_ELEMENT_LOCATION, this).getValue();
}
public Dimension getSize() {
return (Dimension)parent.execute(DriverCommand.GET_ELEMENT_SIZE, this).getValue();
}
public String getValueOfCssProperty(String propertyName) {
return parent.execute(DriverCommand.GET_ELEMENT_VALUE_OF_CSS_PROPERTY, this, propertyName)
.getValue().toString();
}
public boolean isDisplayed() {
ChromeResponse r = execute(DriverCommand.IS_ELEMENT_DISPLAYED, this);
return (Boolean)r.getValue();
}
public void clear() {
parent.execute(DriverCommand.CLEAR_ELEMENT, this);
}
public void click() {
parent.execute(DriverCommand.CLICK_ELEMENT, this);
}
public WebElement findElement(By by) {
return by.findElement(this);
}
public List<WebElement> findElements(By by) {
return by.findElements(this);
}
public String getAttribute(String name) {
Object value = execute(DriverCommand.GET_ELEMENT_ATTRIBUTE, this, name).getValue();
return (value == null) ? null : value.toString();
}
public String getTagName() {
return execute(DriverCommand.GET_ELEMENT_TAG_NAME, this).getValue().toString();
}
public String getText() {
return execute(DriverCommand.GET_ELEMENT_TEXT, this).getValue().toString();
}
public String getValue() {
return execute(DriverCommand.GET_ELEMENT_VALUE, this).getValue().toString();
}
public boolean isEnabled() {
return Boolean.parseBoolean(execute(DriverCommand.IS_ELEMENT_ENABLED, this).getValue().toString());
}
public boolean isSelected() {
return Boolean.parseBoolean(execute(DriverCommand.IS_ELEMENT_SELECTED, this)
.getValue().toString());
}
public void sendKeys(CharSequence... keysToSend) {
StringBuilder builder = new StringBuilder();
for (CharSequence seq : keysToSend) {
builder.append(seq);
}
execute(DriverCommand.SEND_KEYS_TO_ELEMENT, this, builder.toString());
}
public void setSelected() {
execute(DriverCommand.SET_ELEMENT_SELECTED, this);
}
public void submit() {
execute(DriverCommand.SUBMIT_ELEMENT, this);
}
public boolean toggle() {
return Boolean.parseBoolean(execute(DriverCommand.TOGGLE_ELEMENT, this)
.getValue().toString());
}
public Point getLocationOnScreenOnceScrolledIntoView() {
return (Point)parent.execute(DriverCommand.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW, this).getValue();
}
public WebElement findElementByXPath(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "xpath", using));
}
public List<WebElement> findElementsByXPath(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "xpath", using));
}
public WebElement findElementByLinkText(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "link text", using));
}
public WebElement findElementByPartialLinkText(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "partial link text", using));
}
public List<WebElement> findElementsByLinkText(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "link text", using));
}
public List<WebElement> findElementsByPartialLinkText(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "partial link text", using));
}
public WebElement findElementById(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "id", using));
}
public List<WebElement> findElementsById(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "id", using));
}
public WebElement findElementByName(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "name", using));
}
public List<WebElement> findElementsByName(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "name", using));
}
public WebElement findElementByTagName(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "tag name", using));
}
public List<WebElement> findElementsByTagName(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "tag name", using));
}
public WebElement findElementByClassName(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "class name", using));
}
public List<WebElement> findElementsByClassName(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "class name", using));
}
@Override
public WebElement findElementByCssSelector(String using) {
return parent.getElementFrom(execute(DriverCommand.FIND_CHILD_ELEMENT, this, "css", using));
}
public List<WebElement> findElementsByCssSelector(String using) {
return parent.getElementsFrom(execute(DriverCommand.FIND_CHILD_ELEMENTS, this, "css", using));
}
public void hover() {
//Relies on the user not moving the mouse after the hover moves it into place
execute(DriverCommand.HOVER_OVER_ELEMENT, this);
}
@Override
public int hashCode() {
return elementId.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof WebElement)) {
return false;
}
WebElement other = (WebElement) obj;
if (other instanceof WrapsElement) {
other = ((WrapsElement) obj).getWrappedElement();
}
if (!(other instanceof ChromeWebElement)) {
return false;
}
return elementId.equals(((ChromeWebElement)other).elementId);
}
}
| JasonLeyba: Removing invalid @Override annotation from r7988
git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@7994 07704840-8298-11de-bf8c-fd130f914ac9
| chrome/src/java/org/openqa/selenium/chrome/ChromeWebElement.java | JasonLeyba: Removing invalid @Override annotation from r7988 |
|
Java | apache-2.0 | f9d56a2c8be9561fbba44bf5f784ddb6f0f3be22 | 0 | wskplho/android-pull-to-refresh,naver/android-pull-to-refresh,androidjhent/naver,LinKingR/android-pull-to-refresh,Ranjan101/android-pull-to-refresh,wskplho/android-pull-to-refresh,Ranjan101/android-pull-to-refresh,androidjhent/naver.v2,toker/android-pull-to-refresh,handstudio/android-pull-to-refresh | /*******************************************************************************
* Copyright 2011, 2012 Chris Banes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.handmark.pulltorefresh.library;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.handmark.pulltorefresh.library.internal.FlipLoadingLayout;
import com.handmark.pulltorefresh.library.internal.LoadingLayout;
import com.handmark.pulltorefresh.library.internal.RotateLoadingLayout;
import com.handmark.pulltorefresh.library.internal.Utils;
import com.handmark.pulltorefresh.library.internal.ViewCompat;
public abstract class PullToRefreshBase<T extends View> extends LinearLayout implements IPullToRefresh<T> {
// ===========================================================
// Constants
// ===========================================================
static final boolean DEBUG = true;
static final String LOG_TAG = "PullToRefresh";
static final float FRICTION = 2.0f;
public static final int SMOOTH_SCROLL_DURATION_MS = 200;
public static final int SMOOTH_SCROLL_LONG_DURATION_MS = 325;
static final int DEMO_SCROLL_INTERVAL = 225;
static final String STATE_STATE = "ptr_state";
static final String STATE_MODE = "ptr_mode";
static final String STATE_CURRENT_MODE = "ptr_current_mode";
static final String STATE_SCROLLING_REFRESHING_ENABLED = "ptr_disable_scrolling";
static final String STATE_SHOW_REFRESHING_VIEW = "ptr_show_refreshing_view";
static final String STATE_SUPER = "ptr_super";
// ===========================================================
// Fields
// ===========================================================
private int mTouchSlop;
private float mLastMotionX, mLastMotionY;
private float mInitialMotionX, mInitialMotionY;
private boolean mIsBeingDragged = false;
private State mState = State.RESET;
private Mode mMode = Mode.getDefault();
private Mode mCurrentMode;
T mRefreshableView;
private FrameLayout mRefreshableViewWrapper;
private boolean mShowViewWhileRefreshing = true;
private boolean mScrollingWhileRefreshingEnabled = false;
private boolean mFilterTouchEvents = true;
private boolean mOverScrollEnabled = true;
private boolean mLayoutVisibilityChangesEnabled = true;
private Interpolator mScrollAnimationInterpolator;
private AnimationStyle mLoadingAnimationStyle = AnimationStyle.getDefault();
private LoadingLayout mHeaderLayout;
private LoadingLayout mFooterLayout;
private OnRefreshListener<T> mOnRefreshListener;
private OnRefreshListener2<T> mOnRefreshListener2;
private OnPullEventListener<T> mOnPullEventListener;
private SmoothScrollRunnable mCurrentSmoothScrollRunnable;
// ===========================================================
// Constructors
// ===========================================================
public PullToRefreshBase(Context context) {
super(context);
init(context, null);
}
public PullToRefreshBase(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
public PullToRefreshBase(Context context, Mode mode) {
super(context);
mMode = mode;
init(context, null);
}
public PullToRefreshBase(Context context, Mode mode, AnimationStyle animStyle) {
super(context);
mMode = mode;
mLoadingAnimationStyle = animStyle;
init(context, null);
}
@Override
public void addView(View child, int index, ViewGroup.LayoutParams params) {
if (DEBUG) {
Log.d(LOG_TAG, "addView: " + child.getClass().getSimpleName());
}
final T refreshableView = getRefreshableView();
if (refreshableView instanceof ViewGroup) {
((ViewGroup) refreshableView).addView(child, index, params);
} else {
throw new UnsupportedOperationException("Refreshable View is not a ViewGroup so can't addView");
}
}
@Override
public final boolean demo() {
if (mMode.showHeaderLoadingLayout() && isReadyForPullStart()) {
smoothScrollToAndBack(-getHeaderSize() * 2);
return true;
} else if (mMode.showFooterLoadingLayout() && isReadyForPullEnd()) {
smoothScrollToAndBack(getFooterSize() * 2);
return true;
}
return false;
}
@Override
public final Mode getCurrentMode() {
return mCurrentMode;
}
@Override
public final boolean getFilterTouchEvents() {
return mFilterTouchEvents;
}
@Override
public final ILoadingLayout getLoadingLayoutProxy() {
return getLoadingLayoutProxy(true, true);
}
@Override
public final ILoadingLayout getLoadingLayoutProxy(boolean includeStart, boolean includeEnd) {
return createLoadingLayoutProxy(includeStart, includeEnd);
}
@Override
public final Mode getMode() {
return mMode;
}
@Override
public final T getRefreshableView() {
return mRefreshableView;
}
@Override
public final boolean getShowViewWhileRefreshing() {
return mShowViewWhileRefreshing;
}
@Override
public final State getState() {
return mState;
}
/**
* @deprecated See {@link #isScrollingWhileRefreshingEnabled()}.
*/
public final boolean isDisableScrollingWhileRefreshing() {
return !isScrollingWhileRefreshingEnabled();
}
@Override
public final boolean isPullToRefreshEnabled() {
return mMode.permitsPullToRefresh();
}
@Override
public final boolean isPullToRefreshOverScrollEnabled() {
return VERSION.SDK_INT >= VERSION_CODES.GINGERBREAD && mOverScrollEnabled
&& OverscrollHelper.isAndroidOverScrollEnabled(mRefreshableView);
}
@Override
public final boolean isRefreshing() {
return mState == State.REFRESHING || mState == State.MANUAL_REFRESHING;
}
@Override
public final boolean isScrollingWhileRefreshingEnabled() {
return mScrollingWhileRefreshingEnabled;
}
@Override
public final boolean onInterceptTouchEvent(MotionEvent event) {
if (!isPullToRefreshEnabled()) {
return false;
}
final int action = event.getAction();
if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) {
mIsBeingDragged = false;
return false;
}
if (action != MotionEvent.ACTION_DOWN && mIsBeingDragged) {
return true;
}
switch (action) {
case MotionEvent.ACTION_MOVE: {
// If we're refreshing, and the flag is set. Eat all MOVE events
if (!mScrollingWhileRefreshingEnabled && isRefreshing()) {
return true;
}
if (isReadyForPull()) {
final float y = event.getY(), x = event.getX();
final float diff, oppositeDiff, absDiff;
// We need to use the correct values, based on scroll
// direction
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
diff = x - mLastMotionX;
oppositeDiff = y - mLastMotionY;
break;
case VERTICAL:
default:
diff = y - mLastMotionY;
oppositeDiff = x - mLastMotionX;
break;
}
absDiff = Math.abs(diff);
if (absDiff > mTouchSlop && (!mFilterTouchEvents || absDiff > Math.abs(oppositeDiff))) {
if (mMode.showHeaderLoadingLayout() && diff >= 1f && isReadyForPullStart()) {
mLastMotionY = y;
mLastMotionX = x;
mIsBeingDragged = true;
if (mMode == Mode.BOTH) {
mCurrentMode = Mode.PULL_FROM_START;
}
} else if (mMode.showFooterLoadingLayout() && diff <= -1f && isReadyForPullEnd()) {
mLastMotionY = y;
mLastMotionX = x;
mIsBeingDragged = true;
if (mMode == Mode.BOTH) {
mCurrentMode = Mode.PULL_FROM_END;
}
}
}
}
break;
}
case MotionEvent.ACTION_DOWN: {
if (isReadyForPull()) {
mLastMotionY = mInitialMotionY = event.getY();
mLastMotionX = mInitialMotionX = event.getX();
mIsBeingDragged = false;
}
break;
}
}
return mIsBeingDragged;
}
@Override
public final void onRefreshComplete() {
if (isRefreshing()) {
setState(State.RESET);
}
}
@Override
public final boolean onTouchEvent(MotionEvent event) {
if (!isPullToRefreshEnabled()) {
return false;
}
// If we're refreshing, and the flag is set. Eat the event
if (!mScrollingWhileRefreshingEnabled && isRefreshing()) {
return true;
}
if (event.getAction() == MotionEvent.ACTION_DOWN && event.getEdgeFlags() != 0) {
return false;
}
switch (event.getAction()) {
case MotionEvent.ACTION_MOVE: {
if (mIsBeingDragged) {
mLastMotionY = event.getY();
mLastMotionX = event.getX();
pullEvent();
return true;
}
break;
}
case MotionEvent.ACTION_DOWN: {
if (isReadyForPull()) {
mLastMotionY = mInitialMotionY = event.getY();
mLastMotionX = mInitialMotionX = event.getX();
return true;
}
break;
}
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP: {
if (mIsBeingDragged) {
mIsBeingDragged = false;
if (mState == State.RELEASE_TO_REFRESH) {
if (null != mOnRefreshListener) {
setState(State.REFRESHING, true);
mOnRefreshListener.onRefresh(this);
return true;
} else if (null != mOnRefreshListener2) {
setState(State.REFRESHING, true);
if (mCurrentMode == Mode.PULL_FROM_START) {
mOnRefreshListener2.onPullDownToRefresh(this);
} else if (mCurrentMode == Mode.PULL_FROM_END) {
mOnRefreshListener2.onPullUpToRefresh(this);
}
return true;
}
}
// If we're already refreshing, just scroll back to the top
if (isRefreshing()) {
smoothScrollTo(0);
return true;
}
// If we haven't returned by here, then we're not in a state
// to pull, so just reset
setState(State.RESET);
return true;
}
break;
}
}
return false;
}
public final void setScrollingWhileRefreshingEnabled(boolean allowScrollingWhileRefreshing) {
mScrollingWhileRefreshingEnabled = allowScrollingWhileRefreshing;
}
/**
* @deprecated See {@link #setScrollingWhileRefreshingEnabled(boolean)}
*/
public void setDisableScrollingWhileRefreshing(boolean disableScrollingWhileRefreshing) {
setScrollingWhileRefreshingEnabled(!disableScrollingWhileRefreshing);
}
@Override
public final void setFilterTouchEvents(boolean filterEvents) {
mFilterTouchEvents = filterEvents;
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setLastUpdatedLabel(CharSequence label) {
getLoadingLayoutProxy().setLastUpdatedLabel(label);
}
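	/*
	 * Sketch of the replacement pattern referenced by the deprecated label/drawable
	 * setters in this class; pullToRefresh stands for any PullToRefreshBase instance
	 * and the label strings are placeholder assumptions:
	 *
	 *   ILoadingLayout bothEnds = pullToRefresh.getLoadingLayoutProxy();
	 *   bothEnds.setLastUpdatedLabel("Updated just now");
	 *
	 *   ILoadingLayout headerOnly = pullToRefresh.getLoadingLayoutProxy(true, false);
	 *   headerOnly.setPullLabel("Pull down to refresh");
	 */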
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setLoadingDrawable(Drawable drawable) {
getLoadingLayoutProxy().setLoadingDrawable(drawable);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setLoadingDrawable(Drawable drawable, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setLoadingDrawable(
drawable);
}
@Override
public void setLongClickable(boolean longClickable) {
getRefreshableView().setLongClickable(longClickable);
}
@Override
public final void setMode(Mode mode) {
if (mode != mMode) {
if (DEBUG) {
Log.d(LOG_TAG, "Setting mode to: " + mode);
}
mMode = mode;
updateUIForMode();
}
}
public void setOnPullEventListener(OnPullEventListener<T> listener) {
mOnPullEventListener = listener;
}
@Override
public final void setOnRefreshListener(OnRefreshListener<T> listener) {
mOnRefreshListener = listener;
mOnRefreshListener2 = null;
}
@Override
public final void setOnRefreshListener(OnRefreshListener2<T> listener) {
mOnRefreshListener2 = listener;
mOnRefreshListener = null;
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setPullLabel(CharSequence pullLabel) {
getLoadingLayoutProxy().setPullLabel(pullLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setPullLabel(CharSequence pullLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setPullLabel(pullLabel);
}
/**
* @param enable Whether Pull-To-Refresh should be used
	 * @deprecated This simply calls setMode with an appropriate mode based on
* the passed value.
*/
public final void setPullToRefreshEnabled(boolean enable) {
setMode(enable ? Mode.getDefault() : Mode.DISABLED);
}
@Override
public final void setPullToRefreshOverScrollEnabled(boolean enabled) {
mOverScrollEnabled = enabled;
}
@Override
public final void setRefreshing() {
setRefreshing(true);
}
@Override
public final void setRefreshing(boolean doScroll) {
if (!isRefreshing()) {
setState(State.MANUAL_REFRESHING, doScroll);
}
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setRefreshingLabel(CharSequence refreshingLabel) {
getLoadingLayoutProxy().setRefreshingLabel(refreshingLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setRefreshingLabel(CharSequence refreshingLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setRefreshingLabel(
refreshingLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setReleaseLabel(CharSequence releaseLabel) {
setReleaseLabel(releaseLabel, Mode.BOTH);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setReleaseLabel(CharSequence releaseLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setReleaseLabel(
releaseLabel);
}
public void setScrollAnimationInterpolator(Interpolator interpolator) {
mScrollAnimationInterpolator = interpolator;
}
@Override
public final void setShowViewWhileRefreshing(boolean showView) {
mShowViewWhileRefreshing = showView;
}
/**
* @return Either {@link Orientation#VERTICAL} or
* {@link Orientation#HORIZONTAL} depending on the scroll direction.
*/
public abstract Orientation getPullToRefreshScrollDirection();
/**
	 * Called when the UI needs to be updated to be in the
* {@link State#PULL_TO_REFRESH} state.
*/
void onPullToRefresh() {
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.pullToRefresh();
break;
case PULL_FROM_START:
mHeaderLayout.pullToRefresh();
break;
default:
// NO-OP
break;
}
}
/**
	 * Called when the UI needs to be updated to be in the
* {@link State#REFRESHING} or {@link State#MANUAL_REFRESHING} state.
*
* @param doScroll - Whether the UI should scroll for this event.
*/
void onRefreshing(final boolean doScroll) {
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.refreshing();
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.refreshing();
}
if (doScroll) {
if (mShowViewWhileRefreshing) {
switch (mCurrentMode) {
case MANUAL_REFRESH_ONLY:
case PULL_FROM_END:
smoothScrollTo(getFooterSize());
break;
default:
case PULL_FROM_START:
smoothScrollTo(-getHeaderSize());
break;
}
} else {
smoothScrollTo(0);
}
}
}
/**
	 * Called when the UI needs to be updated to be in the
* {@link State#RELEASE_TO_REFRESH} state.
*/
void onReleaseToRefresh() {
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.releaseToRefresh();
break;
case PULL_FROM_START:
mHeaderLayout.releaseToRefresh();
break;
default:
// NO-OP
break;
}
}
/**
	 * Called when the UI needs to be updated to be in the
* {@link State#RESET} state.
*/
void onReset() {
mIsBeingDragged = false;
mLayoutVisibilityChangesEnabled = true;
// Always reset both layouts, just in case...
mHeaderLayout.reset();
mFooterLayout.reset();
smoothScrollTo(0);
}
final void setState(State state, final boolean... params) {
mState = state;
if (DEBUG) {
Log.d(LOG_TAG, "State: " + mState.name());
}
switch (mState) {
case RESET:
onReset();
break;
case PULL_TO_REFRESH:
onPullToRefresh();
break;
case RELEASE_TO_REFRESH:
onReleaseToRefresh();
break;
case REFRESHING:
case MANUAL_REFRESHING:
onRefreshing(params[0]);
break;
case OVERSCROLLING:
// NO-OP
break;
}
// Call OnPullEventListener
if (null != mOnPullEventListener) {
mOnPullEventListener.onPullEvent(this, mState, mCurrentMode);
}
}
/**
* Used internally for adding view. Need because we override addView to
* pass-through to the Refreshable View
*/
protected final void addViewInternal(View child, int index, ViewGroup.LayoutParams params) {
super.addView(child, index, params);
}
/**
* Used internally for adding view. Need because we override addView to
* pass-through to the Refreshable View
*/
protected final void addViewInternal(View child, ViewGroup.LayoutParams params) {
super.addView(child, -1, params);
}
protected LoadingLayout createLoadingLayout(Context context, Mode mode, TypedArray attrs) {
LoadingLayout layout = mLoadingAnimationStyle.createLoadingLayout(context, mode,
getPullToRefreshScrollDirection(), attrs);
layout.setVisibility(View.INVISIBLE);
return layout;
}
/**
* Used internally for {@link #getLoadingLayoutProxy(boolean, boolean)}.
* Allows derivative classes to include any extra LoadingLayouts.
*/
protected LoadingLayoutProxy createLoadingLayoutProxy(final boolean includeStart, final boolean includeEnd) {
LoadingLayoutProxy proxy = new LoadingLayoutProxy();
if (includeStart && mMode.showHeaderLoadingLayout()) {
proxy.addLayout(mHeaderLayout);
}
if (includeEnd && mMode.showFooterLoadingLayout()) {
proxy.addLayout(mFooterLayout);
}
return proxy;
}
/**
* This is implemented by derived classes to return the created View. If you
* need to use a custom View (such as a custom ListView), override this
* method and return an instance of your custom class.
* <p/>
* Be sure to set the ID of the view in this method, especially if you're
* using a ListActivity or ListFragment.
*
* @param context Context to create view with
* @param attrs AttributeSet from wrapped class. Means that anything you
* include in the XML layout declaration will be routed to the
* created View
* @return New instance of the Refreshable View
*/
protected abstract T createRefreshableView(Context context, AttributeSet attrs);
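	/*
	 * Minimal sketch of a derivative class satisfying this contract, assuming a plain
	 * ScrollView as the refreshable View. The class name and the R.id constant are
	 * illustrative assumptions, not part of this library:
	 *
	 *   public class PullToRefreshPlainScrollView extends PullToRefreshBase<ScrollView> {
	 *
	 *       public PullToRefreshPlainScrollView(Context context, AttributeSet attrs) {
	 *           super(context, attrs);
	 *       }
	 *
	 *       @Override
	 *       public Orientation getPullToRefreshScrollDirection() {
	 *           return Orientation.VERTICAL;
	 *       }
	 *
	 *       @Override
	 *       protected ScrollView createRefreshableView(Context context, AttributeSet attrs) {
	 *           ScrollView scrollView = new ScrollView(context, attrs);
	 *           // Set an ID so the view can be located from an Activity or Fragment.
	 *           scrollView.setId(R.id.scrollview);
	 *           return scrollView;
	 *       }
	 *
	 *       @Override
	 *       protected boolean isReadyForPullStart() {
	 *           return getRefreshableView().getScrollY() == 0;
	 *       }
	 *
	 *       @Override
	 *       protected boolean isReadyForPullEnd() {
	 *           View child = getRefreshableView().getChildAt(0);
	 *           return child != null && getRefreshableView().getScrollY()
	 *                   >= child.getHeight() - getHeight();
	 *       }
	 *   }
	 */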
protected final void disableLoadingLayoutVisibilityChanges() {
mLayoutVisibilityChangesEnabled = false;
}
protected final LoadingLayout getFooterLayout() {
return mFooterLayout;
}
protected final int getFooterSize() {
return mFooterLayout.getContentSize();
}
protected final LoadingLayout getHeaderLayout() {
return mHeaderLayout;
}
protected final int getHeaderSize() {
return mHeaderLayout.getContentSize();
}
protected int getPullToRefreshScrollDuration() {
return SMOOTH_SCROLL_DURATION_MS;
}
protected int getPullToRefreshScrollDurationLonger() {
return SMOOTH_SCROLL_LONG_DURATION_MS;
}
protected FrameLayout getRefreshableViewWrapper() {
return mRefreshableViewWrapper;
}
/**
* Allows Derivative classes to handle the XML Attrs without creating a
	 * TypedArray themselves
*
* @param a - TypedArray of PullToRefresh Attributes
*/
protected void handleStyledAttributes(TypedArray a) {
}
/**
* Implemented by derived class to return whether the View is in a state
* where the user can Pull to Refresh by scrolling from the start.
*
	 * @return true if the View is currently in the correct state (for example, top
* of a ListView)
*/
protected abstract boolean isReadyForPullStart();
/**
* Implemented by derived class to return whether the View is in a state
* where the user can Pull to Refresh by scrolling from the end.
*
* @return true if the View is currently in the correct state (for example,
* bottom of a ListView)
*/
protected abstract boolean isReadyForPullEnd();
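	/*
	 * For an AdapterView-backed implementation these checks typically look at the first
	 * and last visible positions. A sketch, assuming an AbsListView named listView:
	 *
	 *   protected boolean isReadyForPullStart() {
	 *       if (listView.getCount() == 0) {
	 *           return true;
	 *       }
	 *       if (listView.getFirstVisiblePosition() == 0) {
	 *           View firstChild = listView.getChildAt(0);
	 *           return firstChild != null && firstChild.getTop() >= listView.getTop();
	 *       }
	 *       return false;
	 *   }
	 *
	 *   protected boolean isReadyForPullEnd() {
	 *       int lastPosition = listView.getCount() - 1;
	 *       if (listView.getLastVisiblePosition() >= lastPosition) {
	 *           int childIndex = listView.getLastVisiblePosition() - listView.getFirstVisiblePosition();
	 *           View lastChild = listView.getChildAt(childIndex);
	 *           return lastChild != null && lastChild.getBottom() <= listView.getBottom();
	 *       }
	 *       return false;
	 *   }
	 */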
/**
* Called by {@link #onRestoreInstanceState(Parcelable)} so that derivative
* classes can handle their saved instance state.
*
* @param savedInstanceState - Bundle which contains saved instance state.
*/
protected void onPtrRestoreInstanceState(Bundle savedInstanceState) {
}
/**
* Called by {@link #onSaveInstanceState()} so that derivative classes can
* save their instance state.
*
* @param saveState - Bundle to be updated with saved state.
*/
protected void onPtrSaveInstanceState(Bundle saveState) {
}
@Override
protected final void onRestoreInstanceState(Parcelable state) {
if (state instanceof Bundle) {
Bundle bundle = (Bundle) state;
setMode(Mode.mapIntToValue(bundle.getInt(STATE_MODE, 0)));
mCurrentMode = Mode.mapIntToValue(bundle.getInt(STATE_CURRENT_MODE, 0));
mScrollingWhileRefreshingEnabled = bundle.getBoolean(STATE_SCROLLING_REFRESHING_ENABLED, false);
mShowViewWhileRefreshing = bundle.getBoolean(STATE_SHOW_REFRESHING_VIEW, true);
// Let super Restore Itself
super.onRestoreInstanceState(bundle.getParcelable(STATE_SUPER));
State viewState = State.mapIntToValue(bundle.getInt(STATE_STATE, 0));
if (viewState == State.REFRESHING || viewState == State.MANUAL_REFRESHING) {
setState(viewState, true);
}
// Now let derivative classes restore their state
onPtrRestoreInstanceState(bundle);
return;
}
super.onRestoreInstanceState(state);
}
@Override
protected final Parcelable onSaveInstanceState() {
Bundle bundle = new Bundle();
// Let derivative classes get a chance to save state first, that way we
		// can make sure they don't overwrite any of our values
onPtrSaveInstanceState(bundle);
bundle.putInt(STATE_STATE, mState.getIntValue());
bundle.putInt(STATE_MODE, mMode.getIntValue());
bundle.putInt(STATE_CURRENT_MODE, mCurrentMode.getIntValue());
bundle.putBoolean(STATE_SCROLLING_REFRESHING_ENABLED, mScrollingWhileRefreshingEnabled);
bundle.putBoolean(STATE_SHOW_REFRESHING_VIEW, mShowViewWhileRefreshing);
bundle.putParcelable(STATE_SUPER, super.onSaveInstanceState());
return bundle;
}
@Override
protected final void onSizeChanged(int w, int h, int oldw, int oldh) {
if (DEBUG) {
Log.d(LOG_TAG, String.format("onSizeChanged. W: %d, H: %d", w, h));
}
super.onSizeChanged(w, h, oldw, oldh);
// We need to update the header/footer when our size changes
refreshLoadingViewsSize();
// Update the Refreshable View layout
refreshRefreshableViewSize(w, h);
/**
* As we're currently in a Layout Pass, we need to schedule another one
* to layout any changes we've made here
*/
post(new Runnable() {
@Override
public void run() {
requestLayout();
}
});
}
/**
* Re-measure the Loading Views height, and adjust internal padding as
* necessary
*/
protected final void refreshLoadingViewsSize() {
final int maximumPullScroll = (int) (getMaximumPullScroll() * 1.2f);
int pLeft = getPaddingLeft();
int pTop = getPaddingTop();
int pRight = getPaddingRight();
int pBottom = getPaddingBottom();
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.setWidth(maximumPullScroll);
pLeft = -maximumPullScroll;
} else {
pLeft = 0;
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.setWidth(maximumPullScroll);
pRight = -maximumPullScroll;
} else {
pRight = 0;
}
break;
case VERTICAL:
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.setHeight(maximumPullScroll);
pTop = -maximumPullScroll;
} else {
pTop = 0;
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.setHeight(maximumPullScroll);
pBottom = -maximumPullScroll;
} else {
pBottom = 0;
}
break;
}
if (DEBUG) {
Log.d(LOG_TAG, String.format("Setting Padding. L: %d, T: %d, R: %d, B: %d", pLeft, pTop, pRight, pBottom));
}
setPadding(pLeft, pTop, pRight, pBottom);
}
protected final void refreshRefreshableViewSize(int width, int height) {
// We need to set the Height of the Refreshable View to the same as
// this layout
LinearLayout.LayoutParams lp = (LinearLayout.LayoutParams) mRefreshableViewWrapper.getLayoutParams();
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
if (lp.width != width) {
lp.width = width;
mRefreshableViewWrapper.requestLayout();
}
break;
case VERTICAL:
if (lp.height != height) {
lp.height = height;
mRefreshableViewWrapper.requestLayout();
}
break;
}
}
/**
* Helper method which just calls scrollTo() in the correct scrolling
* direction.
*
* @param value - New Scroll value
*/
protected final void setHeaderScroll(final int value) {
if (DEBUG) {
Log.d(LOG_TAG, "setHeaderScroll: " + value);
}
if (mLayoutVisibilityChangesEnabled) {
if (value < 0) {
mHeaderLayout.setVisibility(View.VISIBLE);
} else if (value > 0) {
mFooterLayout.setVisibility(View.VISIBLE);
} else {
mHeaderLayout.setVisibility(View.INVISIBLE);
mFooterLayout.setVisibility(View.INVISIBLE);
}
}
/**
* Use a Hardware Layer on the Refreshable View if we've scrolled at
* all. We don't use them on the Header/Footer Views as they change
* often, which would negate any HW layer performance boost.
*/
ViewCompat.setLayerType(mRefreshableViewWrapper, value != 0 ? View.LAYER_TYPE_HARDWARE : View.LAYER_TYPE_NONE);
switch (getPullToRefreshScrollDirection()) {
case VERTICAL:
scrollTo(0, value);
break;
case HORIZONTAL:
scrollTo(value, 0);
break;
}
}
/**
* Smooth Scroll to position using the default duration of
* {@value #SMOOTH_SCROLL_DURATION_MS} ms.
*
* @param scrollValue - Position to scroll to
*/
protected final void smoothScrollTo(int scrollValue) {
smoothScrollTo(scrollValue, getPullToRefreshScrollDuration());
}
/**
* Smooth Scroll to position using the longer default duration of
* {@value #SMOOTH_SCROLL_LONG_DURATION_MS} ms.
*
* @param scrollValue - Position to scroll to
*/
protected final void smoothScrollToLonger(int scrollValue) {
smoothScrollTo(scrollValue, getPullToRefreshScrollDurationLonger());
}
/**
* Updates the View State when the mode has been set. This does not do any
	 * checking that the mode is different to the current state, so it always updates.
*/
protected void updateUIForMode() {
// We need to use the correct LayoutParam values, based on scroll
// direction
final LinearLayout.LayoutParams lp = getLoadingLayoutLayoutParams();
// Remove Header, and then add Header Loading View again if needed
if (this == mHeaderLayout.getParent()) {
removeView(mHeaderLayout);
}
if (mMode.showHeaderLoadingLayout()) {
addViewInternal(mHeaderLayout, 0, lp);
}
// Remove Footer, and then add Footer Loading View again if needed
if (this == mFooterLayout.getParent()) {
removeView(mFooterLayout);
}
if (mMode.showFooterLoadingLayout()) {
addViewInternal(mFooterLayout, lp);
}
// Hide Loading Views
refreshLoadingViewsSize();
// If we're not using Mode.BOTH, set mCurrentMode to mMode, otherwise
// set it to pull down
mCurrentMode = (mMode != Mode.BOTH) ? mMode : Mode.PULL_FROM_START;
}
private void addRefreshableView(Context context, T refreshableView) {
mRefreshableViewWrapper = new FrameLayout(context);
mRefreshableViewWrapper.addView(refreshableView, ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT);
addViewInternal(mRefreshableViewWrapper, new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT,
LayoutParams.MATCH_PARENT));
}
@SuppressWarnings("deprecation")
private void init(Context context, AttributeSet attrs) {
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
setOrientation(LinearLayout.HORIZONTAL);
break;
case VERTICAL:
default:
setOrientation(LinearLayout.VERTICAL);
break;
}
setGravity(Gravity.CENTER);
ViewConfiguration config = ViewConfiguration.get(context);
mTouchSlop = config.getScaledTouchSlop();
// Styleables from XML
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.PullToRefresh);
if (a.hasValue(R.styleable.PullToRefresh_ptrMode)) {
mMode = Mode.mapIntToValue(a.getInteger(R.styleable.PullToRefresh_ptrMode, 0));
}
if (a.hasValue(R.styleable.PullToRefresh_ptrAnimationStyle)) {
mLoadingAnimationStyle = AnimationStyle.mapIntToValue(a.getInteger(
R.styleable.PullToRefresh_ptrAnimationStyle, 0));
}
// Refreshable View
// By passing the attrs, we can add ListView/GridView params via XML
mRefreshableView = createRefreshableView(context, attrs);
addRefreshableView(context, mRefreshableView);
		// We need to create the loading layouts now
mHeaderLayout = createLoadingLayout(context, Mode.PULL_FROM_START, a);
mFooterLayout = createLoadingLayout(context, Mode.PULL_FROM_END, a);
/**
* Styleables from XML
*/
if (a.hasValue(R.styleable.PullToRefresh_ptrRefreshableViewBackground)) {
Drawable background = a.getDrawable(R.styleable.PullToRefresh_ptrRefreshableViewBackground);
if (null != background) {
mRefreshableView.setBackgroundDrawable(background);
}
} else if (a.hasValue(R.styleable.PullToRefresh_ptrAdapterViewBackground)) {
Utils.warnDeprecation("ptrAdapterViewBackground", "ptrRefreshableViewBackground");
Drawable background = a.getDrawable(R.styleable.PullToRefresh_ptrAdapterViewBackground);
if (null != background) {
mRefreshableView.setBackgroundDrawable(background);
}
}
if (a.hasValue(R.styleable.PullToRefresh_ptrOverScroll)) {
mOverScrollEnabled = a.getBoolean(R.styleable.PullToRefresh_ptrOverScroll, true);
}
if (a.hasValue(R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled)) {
mScrollingWhileRefreshingEnabled = a.getBoolean(
R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled, false);
}
// Let the derivative classes have a go at handling attributes, then
// recycle them...
handleStyledAttributes(a);
a.recycle();
// Finally update the UI for the modes
updateUIForMode();
}
private boolean isReadyForPull() {
switch (mMode) {
case PULL_FROM_START:
return isReadyForPullStart();
case PULL_FROM_END:
return isReadyForPullEnd();
case BOTH:
return isReadyForPullEnd() || isReadyForPullStart();
default:
return false;
}
}
	/**
	 * Handles a pull event by updating the scroll position and, where needed, the
	 * current {@link State}.
	 */
private void pullEvent() {
final int newScrollValue;
final int itemDimension;
final float initialMotionValue, lastMotionValue;
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
initialMotionValue = mInitialMotionX;
lastMotionValue = mLastMotionX;
break;
case VERTICAL:
default:
initialMotionValue = mInitialMotionY;
lastMotionValue = mLastMotionY;
break;
}
switch (mCurrentMode) {
case PULL_FROM_END:
newScrollValue = Math.round(Math.max(initialMotionValue - lastMotionValue, 0) / FRICTION);
itemDimension = getFooterSize();
break;
case PULL_FROM_START:
default:
newScrollValue = Math.round(Math.min(initialMotionValue - lastMotionValue, 0) / FRICTION);
itemDimension = getHeaderSize();
break;
}
setHeaderScroll(newScrollValue);
if (newScrollValue != 0 && !isRefreshing()) {
float scale = Math.abs(newScrollValue) / (float) itemDimension;
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.onPull(scale);
break;
case PULL_FROM_START:
default:
mHeaderLayout.onPull(scale);
break;
}
if (mState != State.PULL_TO_REFRESH && itemDimension >= Math.abs(newScrollValue)) {
setState(State.PULL_TO_REFRESH);
} else if (mState == State.PULL_TO_REFRESH && itemDimension < Math.abs(newScrollValue)) {
setState(State.RELEASE_TO_REFRESH);
}
}
}
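/**
 * @return LayoutParams for a Loading Layout: WRAP_CONTENT along the scroll
 *         direction and MATCH_PARENT in the other dimension.
 */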
private LinearLayout.LayoutParams getLoadingLayoutLayoutParams() {
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
return new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.MATCH_PARENT);
case VERTICAL:
default:
return new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT);
}
}
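/**
 * @return The maximum pull distance in pixels, i.e. the view's size along
 *         the scroll direction divided by FRICTION.
 */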
private int getMaximumPullScroll() {
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
return Math.round(getWidth() / FRICTION);
case VERTICAL:
default:
return Math.round(getHeight() / FRICTION);
}
}
/**
* Smooth Scroll to position using the specific duration
*
* @param scrollValue - Position to scroll to
* @param duration - Duration of animation in milliseconds
*/
private final void smoothScrollTo(int scrollValue, long duration) {
smoothScrollTo(scrollValue, duration, 0, null);
}
private final void smoothScrollTo(int newScrollValue, long duration, long delayMillis,
OnSmoothScrollFinishedListener listener) {
if (null != mCurrentSmoothScrollRunnable) {
mCurrentSmoothScrollRunnable.stop();
}
final int oldScrollValue;
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
oldScrollValue = getScrollX();
break;
case VERTICAL:
default:
oldScrollValue = getScrollY();
break;
}
if (oldScrollValue != newScrollValue) {
if (null == mScrollAnimationInterpolator) {
// Default interpolator is a Decelerate Interpolator
mScrollAnimationInterpolator = new DecelerateInterpolator();
}
mCurrentSmoothScrollRunnable = new SmoothScrollRunnable(oldScrollValue, newScrollValue, duration, listener);
if (delayMillis > 0) {
postDelayed(mCurrentSmoothScrollRunnable, delayMillis);
} else {
post(mCurrentSmoothScrollRunnable);
}
}
}
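/**
 * Smooth scrolls to the given position and then back to 0 after a short
 * delay. Used by {@link #demo()} to hint that Pull-to-Refresh is available.
 */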
private final void smoothScrollToAndBack(int y) {
smoothScrollTo(y, SMOOTH_SCROLL_DURATION_MS, 0, new OnSmoothScrollFinishedListener() {
@Override
public void onSmoothScrollFinished() {
smoothScrollTo(0, SMOOTH_SCROLL_DURATION_MS, DEMO_SCROLL_INTERVAL, null);
}
});
}
public static enum AnimationStyle {
/**
* This is the default for Android-PullToRefresh. Allows you to use any
* drawable, which is automatically rotated and used as a Progress Bar.
*/
ROTATE,
/**
* This is the old default, and what is commonly used on iOS. Uses an
* arrow image which flips depending on where the user has scrolled.
*/
FLIP;
static AnimationStyle getDefault() {
return ROTATE;
}
/**
* Maps an int to a specific AnimationStyle. This is needed when saving
* state, or inflating the view from XML where the style is given through
* an attr int.
*
* @param modeInt - int to map an AnimationStyle to
* @return AnimationStyle that modeInt maps to, or ROTATE by default.
*/
static AnimationStyle mapIntToValue(int modeInt) {
switch (modeInt) {
case 0x0:
default:
return ROTATE;
case 0x1:
return FLIP;
}
}
LoadingLayout createLoadingLayout(Context context, Mode mode, Orientation scrollDirection, TypedArray attrs) {
switch (this) {
case ROTATE:
default:
return new RotateLoadingLayout(context, mode, scrollDirection, attrs);
case FLIP:
return new FlipLoadingLayout(context, mode, scrollDirection, attrs);
}
}
}
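/*
 * Note: the AnimationStyle is normally chosen via the ptrAnimationStyle XML
 * attribute and mapped through mapIntToValue(...) above; when nothing is
 * specified, getDefault() (ROTATE) is used.
 */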
public static enum Mode {
/**
* Disable all Pull-to-Refresh gesture and Refreshing handling
*/
DISABLED(0x0),
/**
* Only allow the user to Pull from the start of the Refreshable View to
* refresh. The start is either the Top or Left, depending on the
* scrolling direction.
*/
PULL_FROM_START(0x1),
/**
* Only allow the user to Pull from the end of the Refreshable View to
* refresh. The end is either the Bottom or Right, depending on the
* scrolling direction.
*/
PULL_FROM_END(0x2),
/**
* Allow the user to Pull from both the start and the end to refresh.
*/
BOTH(0x3),
/**
* Disables Pull-to-Refresh gesture handling, but allows manually
* setting the Refresh state via
* {@link PullToRefreshBase#setRefreshing() setRefreshing()}.
*/
MANUAL_REFRESH_ONLY(0x4);
/**
* @deprecated Use {@link #PULL_FROM_START} from now on.
*/
public static Mode PULL_DOWN_TO_REFRESH = Mode.PULL_FROM_START;
/**
* @deprecated Use {@link #PULL_FROM_END} from now on.
*/
public static Mode PULL_UP_TO_REFRESH = Mode.PULL_FROM_END;
/**
* Maps an int to a specific mode. This is needed when saving state, or
* inflating the view from XML where the mode is given through an attr
* int.
*
* @param modeInt - int to map a Mode to
* @return Mode that modeInt maps to, or PULL_FROM_START by default.
*/
static Mode mapIntToValue(final int modeInt) {
for (Mode value : Mode.values()) {
if (modeInt == value.getIntValue()) {
return value;
}
}
// If not, return default
return getDefault();
}
static Mode getDefault() {
return PULL_FROM_START;
}
private int mIntValue;
// The modeInt values need to match those from attrs.xml
Mode(int modeInt) {
mIntValue = modeInt;
}
/**
* @return true if the mode permits Pull-to-Refresh
*/
boolean permitsPullToRefresh() {
return !(this == DISABLED || this == MANUAL_REFRESH_ONLY);
}
/**
* @return true if this mode wants the Loading Layout Header to be shown
*/
boolean showHeaderLoadingLayout() {
return this == PULL_FROM_START || this == BOTH;
}
/**
* @return true if this mode wants the Loading Layout Footer to be shown
*/
boolean showFooterLoadingLayout() {
return this == PULL_FROM_END || this == BOTH || this == MANUAL_REFRESH_ONLY;
}
int getIntValue() {
return mIntValue;
}
}
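/*
 * Usage sketch (illustrative, not part of the original source): the Mode is
 * usually chosen in XML via the ptrMode attribute, but can also be set from
 * code. "pullList" below is an assumed, already-inflated
 * PullToRefreshListView instance:
 *
 *     pullList.setMode(Mode.BOTH);
 */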
// ===========================================================
// Inner, Anonymous Classes, and Enumerations
// ===========================================================
/**
* Simple Listener that allows you to be notified when the user has scrolled
* to the end of the AdapterView. See
* {@link PullToRefreshAdapterViewBase#setOnLastItemVisibleListener}.
*
* @author Chris Banes
*/
public static interface OnLastItemVisibleListener {
/**
* Called when the user has scrolled to the end of the list
*/
public void onLastItemVisible();
}
/**
* Listener that allows you to be notified when the user has started or
* finished a touch event. Useful when you want to append extra UI events
* (such as sounds). See
* {@link PullToRefreshAdapterViewBase#setOnPullEventListener}.
*
* @author Chris Banes
*/
public static interface OnPullEventListener<V extends View> {
/**
* Called when the internal state has been changed, usually by the user
* pulling.
*
* @param refreshView - View whose state has changed.
* @param state - The new state of View.
* @param direction - One of {@link Mode#PULL_FROM_START} or
* {@link Mode#PULL_FROM_END} depending on which direction
* the user is pulling. Only useful when <var>state</var> is
* {@link State#PULL_TO_REFRESH} or
* {@link State#RELEASE_TO_REFRESH}.
*/
public void onPullEvent(final PullToRefreshBase<V> refreshView, State state, Mode direction);
}
/**
* Simple Listener to listen for any callbacks to Refresh.
*
* @author Chris Banes
*/
public static interface OnRefreshListener<V extends View> {
/**
* onRefresh will be called for both a Pull from start, and Pull from
* end
*/
public void onRefresh(final PullToRefreshBase<V> refreshView);
}
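/*
 * Usage sketch (illustrative only; "pullList" is an assumed
 * PullToRefreshListView instance):
 *
 *     pullList.setOnRefreshListener(new OnRefreshListener<ListView>() {
 *         @Override
 *         public void onRefresh(PullToRefreshBase<ListView> refreshView) {
 *             // Start the asynchronous reload here, and call
 *             // refreshView.onRefreshComplete() on the UI thread once
 *             // the new data is ready.
 *         }
 *     });
 */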
/**
* An advanced version of the Listener to listen for callbacks to Refresh.
* This listener is different as it allows you to differentiate between Pull
* Ups, and Pull Downs.
*
* @author Chris Banes
*/
public static interface OnRefreshListener2<V extends View> {
// TODO These methods need renaming to START/END rather than DOWN/UP
/**
* onPullDownToRefresh will be called only when the user has Pulled from
* the start, and released.
*/
public void onPullDownToRefresh(final PullToRefreshBase<V> refreshView);
/**
* onPullUpToRefresh will be called only when the user has Pulled from
* the end, and released.
*/
public void onPullUpToRefresh(final PullToRefreshBase<V> refreshView);
}
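/*
 * Usage sketch (illustrative only): OnRefreshListener2 lets the two pull
 * directions be handled separately, e.g.:
 *
 *     pullList.setOnRefreshListener(new OnRefreshListener2<ListView>() {
 *         @Override
 *         public void onPullDownToRefresh(PullToRefreshBase<ListView> refreshView) {
 *             // e.g. fetch newer items
 *         }
 *
 *         @Override
 *         public void onPullUpToRefresh(PullToRefreshBase<ListView> refreshView) {
 *             // e.g. fetch older items
 *         }
 *     });
 */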
public static enum Orientation {
VERTICAL, HORIZONTAL;
}
public static enum State {
/**
* When the UI is in a state which means that user is not interacting
* with the Pull-to-Refresh function.
*/
RESET(0x0),
/**
* When the UI is being pulled by the user, but has not been pulled far
* enough so that it refreshes when released.
*/
PULL_TO_REFRESH(0x1),
/**
* When the UI is being pulled by the user, and <strong>has</strong>
* been pulled far enough so that it will refresh when released.
*/
RELEASE_TO_REFRESH(0x2),
/**
* When the UI is currently refreshing, caused by a pull gesture.
*/
REFRESHING(0x8),
/**
* When the UI is currently refreshing, caused by a call to
* {@link PullToRefreshBase#setRefreshing() setRefreshing()}.
*/
MANUAL_REFRESHING(0x9),
/**
* When the UI is currently overscrolling, caused by a fling on the
* Refreshable View.
*/
OVERSCROLLING(0x10);
/**
* Maps an int to a specific state. This is needed when saving state.
*
* @param stateInt - int to map a State to
* @return State that stateInt maps to
*/
static State mapIntToValue(final int stateInt) {
for (State value : State.values()) {
if (stateInt == value.getIntValue()) {
return value;
}
}
// If not, return default
return RESET;
}
private int mIntValue;
State(int intValue) {
mIntValue = intValue;
}
int getIntValue() {
return mIntValue;
}
}
final class SmoothScrollRunnable implements Runnable {
private final Interpolator mInterpolator;
private final int mScrollToY;
private final int mScrollFromY;
private final long mDuration;
private OnSmoothScrollFinishedListener mListener;
private boolean mContinueRunning = true;
private long mStartTime = -1;
private int mCurrentY = -1;
public SmoothScrollRunnable(int fromY, int toY, long duration, OnSmoothScrollFinishedListener listener) {
mScrollFromY = fromY;
mScrollToY = toY;
mInterpolator = mScrollAnimationInterpolator;
mDuration = duration;
mListener = listener;
}
@Override
public void run() {
/**
* Only set mStartTime if this is the first time we're starting,
* else actually calculate the Y delta
*/
if (mStartTime == -1) {
mStartTime = System.currentTimeMillis();
} else {
/**
* We do all calculations in long to reduce software float
* calculations. We use 1000 as it gives us good accuracy and
* small rounding errors
*/
long normalizedTime = (1000 * (System.currentTimeMillis() - mStartTime)) / mDuration;
normalizedTime = Math.max(Math.min(normalizedTime, 1000), 0);
final int deltaY = Math.round((mScrollFromY - mScrollToY)
* mInterpolator.getInterpolation(normalizedTime / 1000f));
mCurrentY = mScrollFromY - deltaY;
setHeaderScroll(mCurrentY);
}
// If we're not at the target Y, keep going...
if (mContinueRunning && mScrollToY != mCurrentY) {
ViewCompat.postOnAnimation(PullToRefreshBase.this, this);
} else {
if (null != mListener) {
mListener.onSmoothScrollFinished();
}
}
}
public void stop() {
mContinueRunning = false;
removeCallbacks(this);
}
}
static interface OnSmoothScrollFinishedListener {
void onSmoothScrollFinished();
}
}
| library/src/com/handmark/pulltorefresh/library/PullToRefreshBase.java | /*******************************************************************************
* Copyright 2011, 2012 Chris Banes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.handmark.pulltorefresh.library;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.handmark.pulltorefresh.library.internal.FlipLoadingLayout;
import com.handmark.pulltorefresh.library.internal.LoadingLayout;
import com.handmark.pulltorefresh.library.internal.RotateLoadingLayout;
import com.handmark.pulltorefresh.library.internal.Utils;
import com.handmark.pulltorefresh.library.internal.ViewCompat;
public abstract class PullToRefreshBase<T extends View> extends LinearLayout implements IPullToRefresh<T> {
// ===========================================================
// Constants
// ===========================================================
static final boolean DEBUG = true;
static final String LOG_TAG = "PullToRefresh";
static final float FRICTION = 2.0f;
public static final int SMOOTH_SCROLL_DURATION_MS = 200;
public static final int SMOOTH_SCROLL_LONG_DURATION_MS = 325;
static final int DEMO_SCROLL_INTERVAL = 225;
static final String STATE_STATE = "ptr_state";
static final String STATE_MODE = "ptr_mode";
static final String STATE_CURRENT_MODE = "ptr_current_mode";
static final String STATE_SCROLLING_REFRESHING_ENABLED = "ptr_disable_scrolling";
static final String STATE_SHOW_REFRESHING_VIEW = "ptr_show_refreshing_view";
static final String STATE_SUPER = "ptr_super";
// ===========================================================
// Fields
// ===========================================================
private int mTouchSlop;
private float mLastMotionX, mLastMotionY;
private float mInitialMotionX, mInitialMotionY;
private boolean mIsBeingDragged = false;
private State mState = State.RESET;
private Mode mMode = Mode.getDefault();
private Mode mCurrentMode;
T mRefreshableView;
private FrameLayout mRefreshableViewWrapper;
private boolean mShowViewWhileRefreshing = true;
private boolean mScrollingWhileRefreshingEnabled = false;
private boolean mFilterTouchEvents = true;
private boolean mOverScrollEnabled = true;
private boolean mLayoutVisibilityChangesEnabled = true;
private Interpolator mScrollAnimationInterpolator;
private AnimationStyle mLoadingAnimationStyle = AnimationStyle.getDefault();
private LoadingLayout mHeaderLayout;
private LoadingLayout mFooterLayout;
private OnRefreshListener<T> mOnRefreshListener;
private OnRefreshListener2<T> mOnRefreshListener2;
private OnPullEventListener<T> mOnPullEventListener;
private SmoothScrollRunnable mCurrentSmoothScrollRunnable;
// ===========================================================
// Constructors
// ===========================================================
public PullToRefreshBase(Context context) {
super(context);
init(context, null);
}
public PullToRefreshBase(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
public PullToRefreshBase(Context context, Mode mode) {
super(context);
mMode = mode;
init(context, null);
}
public PullToRefreshBase(Context context, Mode mode, AnimationStyle animStyle) {
super(context);
mMode = mode;
mLoadingAnimationStyle = animStyle;
init(context, null);
}
@Override
public void addView(View child, int index, ViewGroup.LayoutParams params) {
if (DEBUG) {
Log.d(LOG_TAG, "addView: " + child.getClass().getSimpleName());
}
final T refreshableView = getRefreshableView();
if (refreshableView instanceof ViewGroup) {
((ViewGroup) refreshableView).addView(child, index, params);
} else {
throw new UnsupportedOperationException("Refreshable View is not a ViewGroup so can't addView");
}
}
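/**
 * Briefly scrolls the header (or footer) Loading Layout into view and back,
 * as a hint to the user that Pull-to-Refresh is available.
 *
 * @return true if the demo animation was started, false if the view was not
 *         in a state that allows it
 */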
@Override
public final boolean demo() {
if (mMode.showHeaderLoadingLayout() && isReadyForPullStart()) {
smoothScrollToAndBack(-getHeaderSize() * 2);
return true;
} else if (mMode.showFooterLoadingLayout() && isReadyForPullEnd()) {
smoothScrollToAndBack(getFooterSize() * 2);
return true;
}
return false;
}
@Override
public final Mode getCurrentMode() {
return mCurrentMode;
}
@Override
public final boolean getFilterTouchEvents() {
return mFilterTouchEvents;
}
@Override
public final ILoadingLayout getLoadingLayoutProxy() {
return getLoadingLayoutProxy(true, true);
}
@Override
public final ILoadingLayout getLoadingLayoutProxy(boolean includeStart, boolean includeEnd) {
return createLoadingLayoutProxy(includeStart, includeEnd);
}
@Override
public final Mode getMode() {
return mMode;
}
@Override
public final T getRefreshableView() {
return mRefreshableView;
}
@Override
public final boolean getShowViewWhileRefreshing() {
return mShowViewWhileRefreshing;
}
@Override
public final State getState() {
return mState;
}
/**
* @deprecated See {@link #isScrollingWhileRefreshingEnabled()}.
*/
public final boolean isDisableScrollingWhileRefreshing() {
return !isScrollingWhileRefreshingEnabled();
}
@Override
public final boolean isPullToRefreshEnabled() {
return mMode.permitsPullToRefresh();
}
@Override
public final boolean isPullToRefreshOverScrollEnabled() {
return VERSION.SDK_INT >= VERSION_CODES.GINGERBREAD && mOverScrollEnabled
&& OverscrollHelper.isAndroidOverScrollEnabled(mRefreshableView);
}
@Override
public final boolean isRefreshing() {
return mState == State.REFRESHING || mState == State.MANUAL_REFRESHING;
}
@Override
public final boolean isScrollingWhileRefreshingEnabled() {
return mScrollingWhileRefreshingEnabled;
}
@Override
public final boolean onInterceptTouchEvent(MotionEvent event) {
if (!isPullToRefreshEnabled()) {
return false;
}
final int action = event.getAction();
if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) {
mIsBeingDragged = false;
return false;
}
if (action != MotionEvent.ACTION_DOWN && mIsBeingDragged) {
return true;
}
switch (action) {
case MotionEvent.ACTION_MOVE: {
// If we're refreshing, and the flag is set. Eat all MOVE events
if (!mScrollingWhileRefreshingEnabled && isRefreshing()) {
return true;
}
if (isReadyForPull()) {
final float y = event.getY(), x = event.getX();
final float diff, oppositeDiff, absDiff;
// We need to use the correct values, based on scroll
// direction
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
diff = x - mLastMotionX;
oppositeDiff = y - mLastMotionY;
break;
case VERTICAL:
default:
diff = y - mLastMotionY;
oppositeDiff = x - mLastMotionX;
break;
}
absDiff = Math.abs(diff);
if (absDiff > mTouchSlop && (!mFilterTouchEvents || absDiff > Math.abs(oppositeDiff))) {
if (mMode.showHeaderLoadingLayout() && diff >= 1f && isReadyForPullStart()) {
mLastMotionY = y;
mLastMotionX = x;
mIsBeingDragged = true;
if (mMode == Mode.BOTH) {
mCurrentMode = Mode.PULL_FROM_START;
}
} else if (mMode.showFooterLoadingLayout() && diff <= -1f && isReadyForPullEnd()) {
mLastMotionY = y;
mLastMotionX = x;
mIsBeingDragged = true;
if (mMode == Mode.BOTH) {
mCurrentMode = Mode.PULL_FROM_END;
}
}
}
}
break;
}
case MotionEvent.ACTION_DOWN: {
if (isReadyForPull()) {
mLastMotionY = mInitialMotionY = event.getY();
mLastMotionX = mInitialMotionX = event.getX();
mIsBeingDragged = false;
}
break;
}
}
return mIsBeingDragged;
}
@Override
public final void onRefreshComplete() {
if (isRefreshing()) {
setState(State.RESET);
}
}
@Override
public final boolean onTouchEvent(MotionEvent event) {
if (!isPullToRefreshEnabled()) {
return false;
}
// If we're refreshing, and the flag is set. Eat the event
if (!mScrollingWhileRefreshingEnabled && isRefreshing()) {
return true;
}
if (event.getAction() == MotionEvent.ACTION_DOWN && event.getEdgeFlags() != 0) {
return false;
}
switch (event.getAction()) {
case MotionEvent.ACTION_MOVE: {
if (mIsBeingDragged) {
mLastMotionY = event.getY();
mLastMotionX = event.getX();
pullEvent();
return true;
}
break;
}
case MotionEvent.ACTION_DOWN: {
if (isReadyForPull()) {
mLastMotionY = mInitialMotionY = event.getY();
mLastMotionX = mInitialMotionX = event.getX();
return true;
}
break;
}
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP: {
if (mIsBeingDragged) {
mIsBeingDragged = false;
if (mState == State.RELEASE_TO_REFRESH) {
if (null != mOnRefreshListener) {
setState(State.REFRESHING, true);
mOnRefreshListener.onRefresh(this);
return true;
} else if (null != mOnRefreshListener2) {
setState(State.REFRESHING, true);
if (mCurrentMode == Mode.PULL_FROM_START) {
mOnRefreshListener2.onPullDownToRefresh(this);
} else if (mCurrentMode == Mode.PULL_FROM_END) {
mOnRefreshListener2.onPullUpToRefresh(this);
}
return true;
}
}
// If we're already refreshing, just scroll back to the top
if (isRefreshing()) {
smoothScrollTo(0);
return true;
}
// If we haven't returned by here, then we're not in a state
// to pull, so just reset
setState(State.RESET);
return true;
}
break;
}
}
return false;
}
public final void setScrollingWhileRefreshingEnabled(boolean allowScrollingWhileRefreshing) {
mScrollingWhileRefreshingEnabled = allowScrollingWhileRefreshing;
}
/**
* @deprecated See {@link #setScrollingWhileRefreshingEnabled(boolean)}
*/
public void setDisableScrollingWhileRefreshing(boolean disableScrollingWhileRefreshing) {
setScrollingWhileRefreshingEnabled(!disableScrollingWhileRefreshing);
}
@Override
public final void setFilterTouchEvents(boolean filterEvents) {
mFilterTouchEvents = filterEvents;
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setLastUpdatedLabel(CharSequence label) {
getLoadingLayoutProxy().setLastUpdatedLabel(label);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setLoadingDrawable(Drawable drawable) {
getLoadingLayoutProxy().setLoadingDrawable(drawable);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setLoadingDrawable(Drawable drawable, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setLoadingDrawable(
drawable);
}
@Override
public void setLongClickable(boolean longClickable) {
getRefreshableView().setLongClickable(longClickable);
}
@Override
public final void setMode(Mode mode) {
if (mode != mMode) {
if (DEBUG) {
Log.d(LOG_TAG, "Setting mode to: " + mode);
}
mMode = mode;
updateUIForMode();
}
}
public void setOnPullEventListener(OnPullEventListener<T> listener) {
mOnPullEventListener = listener;
}
@Override
public final void setOnRefreshListener(OnRefreshListener<T> listener) {
mOnRefreshListener = listener;
mOnRefreshListener2 = null;
}
@Override
public final void setOnRefreshListener(OnRefreshListener2<T> listener) {
mOnRefreshListener2 = listener;
mOnRefreshListener = null;
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setPullLabel(CharSequence pullLabel) {
getLoadingLayoutProxy().setPullLabel(pullLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setPullLabel(CharSequence pullLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setPullLabel(pullLabel);
}
/**
* @param enable Whether Pull-To-Refresh should be used
* @deprecated This simply calls setMode with an appropriate mode based on
* the passed value.
*/
public final void setPullToRefreshEnabled(boolean enable) {
setMode(enable ? Mode.getDefault() : Mode.DISABLED);
}
@Override
public final void setPullToRefreshOverScrollEnabled(boolean enabled) {
mOverScrollEnabled = enabled;
}
@Override
public final void setRefreshing() {
setRefreshing(true);
}
@Override
public final void setRefreshing(boolean doScroll) {
if (!isRefreshing()) {
setState(State.MANUAL_REFRESHING, doScroll);
}
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setRefreshingLabel(CharSequence refreshingLabel) {
getLoadingLayoutProxy().setRefreshingLabel(refreshingLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setRefreshingLabel(CharSequence refreshingLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setRefreshingLabel(
refreshingLabel);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy()}.
*/
public void setReleaseLabel(CharSequence releaseLabel) {
setReleaseLabel(releaseLabel, Mode.BOTH);
}
/**
* @deprecated You should now call this method on the result of
* {@link #getLoadingLayoutProxy(boolean, boolean)}.
*/
public void setReleaseLabel(CharSequence releaseLabel, Mode mode) {
getLoadingLayoutProxy(mode.showHeaderLoadingLayout(), mode.showFooterLoadingLayout()).setReleaseLabel(
releaseLabel);
}
public void setScrollAnimationInterpolator(Interpolator interpolator) {
mScrollAnimationInterpolator = interpolator;
}
@Override
public final void setShowViewWhileRefreshing(boolean showView) {
mShowViewWhileRefreshing = showView;
}
/**
* @return Either {@link Orientation#VERTICAL} or
* {@link Orientation#HORIZONTAL} depending on the scroll direction.
*/
public abstract Orientation getPullToRefreshScrollDirection();
/**
* Called when the UI needs to be updated to be in the
* {@link State#PULL_TO_REFRESH} state.
*/
void onPullToRefresh() {
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.pullToRefresh();
break;
case PULL_FROM_START:
mHeaderLayout.pullToRefresh();
break;
default:
// NO-OP
break;
}
}
/**
* Called when the UI needs to be updated to be in the
* {@link State#REFRESHING} or {@link State#MANUAL_REFRESHING} state.
*
* @param doScroll - Whether the UI should scroll for this event.
*/
void onRefreshing(final boolean doScroll) {
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.refreshing();
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.refreshing();
}
if (doScroll) {
if (mShowViewWhileRefreshing) {
switch (mCurrentMode) {
case MANUAL_REFRESH_ONLY:
case PULL_FROM_END:
smoothScrollTo(getFooterSize());
break;
default:
case PULL_FROM_START:
smoothScrollTo(-getHeaderSize());
break;
}
} else {
smoothScrollTo(0);
}
}
}
/**
* Called when the UI needs to be updated to be in the
* {@link State#RELEASE_TO_REFRESH} state.
*/
void onReleaseToRefresh() {
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.releaseToRefresh();
break;
case PULL_FROM_START:
mHeaderLayout.releaseToRefresh();
break;
default:
// NO-OP
break;
}
}
/**
* Called when the UI needs to be updated to be in the
* {@link State#RESET} state.
*/
void onReset() {
mIsBeingDragged = false;
mLayoutVisibilityChangesEnabled = true;
// Always reset both layouts, just in case...
mHeaderLayout.reset();
mFooterLayout.reset();
smoothScrollTo(0);
}
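/**
 * Moves the view into the given State and calls the matching on...()
 * callback.
 *
 * @param state - new State
 * @param params - optional extras; for REFRESHING/MANUAL_REFRESHING,
 *            params[0] indicates whether the UI should scroll to show the
 *            Loading Layout.
 */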
final void setState(State state, final boolean... params) {
mState = state;
if (DEBUG) {
Log.d(LOG_TAG, "State: " + mState.name());
}
switch (mState) {
case RESET:
onReset();
break;
case PULL_TO_REFRESH:
onPullToRefresh();
break;
case RELEASE_TO_REFRESH:
onReleaseToRefresh();
break;
case REFRESHING:
case MANUAL_REFRESHING:
onRefreshing(params[0]);
break;
case OVERSCROLLING:
// NO-OP
break;
}
// Call OnPullEventListener
if (null != mOnPullEventListener) {
mOnPullEventListener.onPullEvent(this, mState, mCurrentMode);
}
}
/**
* Used internally for adding views. Needed because we override addView to
* pass through to the Refreshable View
*/
protected final void addViewInternal(View child, int index, ViewGroup.LayoutParams params) {
super.addView(child, index, params);
}
/**
* Used internally for adding views. Needed because we override addView to
* pass through to the Refreshable View
*/
protected final void addViewInternal(View child, ViewGroup.LayoutParams params) {
super.addView(child, -1, params);
}
protected LoadingLayout createLoadingLayout(Context context, Mode mode, TypedArray attrs) {
LoadingLayout layout = mLoadingAnimationStyle.createLoadingLayout(context, mode,
getPullToRefreshScrollDirection(), attrs);
layout.setVisibility(View.INVISIBLE);
return layout;
}
/**
* Used internally for {@link #getLoadingLayoutProxy(boolean, boolean)}.
* Allows derivative classes to include any extra LoadingLayouts.
*/
protected LoadingLayoutProxy createLoadingLayoutProxy(final boolean includeStart, final boolean includeEnd) {
LoadingLayoutProxy proxy = new LoadingLayoutProxy();
if (includeStart && mMode.showHeaderLoadingLayout()) {
proxy.addLayout(mHeaderLayout);
}
if (includeEnd && mMode.showFooterLoadingLayout()) {
proxy.addLayout(mFooterLayout);
}
return proxy;
}
/**
* This is implemented by derived classes to return the created View. If you
* need to use a custom View (such as a custom ListView), override this
* method and return an instance of your custom class.
* <p/>
* Be sure to set the ID of the view in this method, especially if you're
* using a ListActivity or ListFragment.
*
* @param context Context to create view with
* @param attrs AttributeSet from wrapped class. Means that anything you
* include in the XML layout declaration will be routed to the
* created View
* @return New instance of the Refreshable View
*/
protected abstract T createRefreshableView(Context context, AttributeSet attrs);
protected final void disableLoadingLayoutVisibilityChanges() {
mLayoutVisibilityChangesEnabled = false;
}
protected final LoadingLayout getFooterLayout() {
return mFooterLayout;
}
protected final int getFooterSize() {
return mFooterLayout.getContentSize();
}
protected final LoadingLayout getHeaderLayout() {
return mHeaderLayout;
}
protected final int getHeaderSize() {
return mHeaderLayout.getContentSize();
}
protected int getPullToRefreshScrollDuration() {
return SMOOTH_SCROLL_DURATION_MS;
}
protected int getPullToRefreshScrollDurationLonger() {
return SMOOTH_SCROLL_LONG_DURATION_MS;
}
protected FrameLayout getRefreshableViewWrapper() {
return mRefreshableViewWrapper;
}
/**
* Allows Derivative classes to handle the XML Attrs without creating a
* TypedArray themselves
*
* @param a - TypedArray of PullToRefresh Attributes
*/
protected void handleStyledAttributes(TypedArray a) {
}
/**
* Implemented by derived class to return whether the View is in a state
* where the user can Pull to Refresh by scrolling from the start.
*
* @return true if the View is currently in the correct state (for example, top
* of a ListView)
*/
protected abstract boolean isReadyForPullStart();
/**
* Implemented by derived class to return whether the View is in a state
* where the user can Pull to Refresh by scrolling from the end.
*
* @return true if the View is currently in the correct state (for example,
* bottom of a ListView)
*/
protected abstract boolean isReadyForPullEnd();
/**
* Called by {@link #onRestoreInstanceState(Parcelable)} so that derivative
* classes can handle their saved instance state.
*
* @param savedInstanceState - Bundle which contains saved instance state.
*/
protected void onPtrRestoreInstanceState(Bundle savedInstanceState) {
}
/**
* Called by {@link #onSaveInstanceState()} so that derivative classes can
* save their instance state.
*
* @param saveState - Bundle to be updated with saved state.
*/
protected void onPtrSaveInstanceState(Bundle saveState) {
}
@Override
protected final void onRestoreInstanceState(Parcelable state) {
if (state instanceof Bundle) {
Bundle bundle = (Bundle) state;
setMode(Mode.mapIntToValue(bundle.getInt(STATE_MODE, 0)));
mCurrentMode = Mode.mapIntToValue(bundle.getInt(STATE_CURRENT_MODE, 0));
mScrollingWhileRefreshingEnabled = bundle.getBoolean(STATE_SCROLLING_REFRESHING_ENABLED, false);
mShowViewWhileRefreshing = bundle.getBoolean(STATE_SHOW_REFRESHING_VIEW, true);
// Let super Restore Itself
super.onRestoreInstanceState(bundle.getParcelable(STATE_SUPER));
State viewState = State.mapIntToValue(bundle.getInt(STATE_STATE, 0));
if (viewState == State.REFRESHING || viewState == State.MANUAL_REFRESHING) {
setState(viewState, true);
}
// Now let derivative classes restore their state
onPtrRestoreInstanceState(bundle);
return;
}
super.onRestoreInstanceState(state);
}
@Override
protected final Parcelable onSaveInstanceState() {
Bundle bundle = new Bundle();
// Let derivative classes get a chance to save state first, that way we
// can make sure they don't overwrite any of our values
onPtrSaveInstanceState(bundle);
bundle.putInt(STATE_STATE, mState.getIntValue());
bundle.putInt(STATE_MODE, mMode.getIntValue());
bundle.putInt(STATE_CURRENT_MODE, mCurrentMode.getIntValue());
bundle.putBoolean(STATE_SCROLLING_REFRESHING_ENABLED, mScrollingWhileRefreshingEnabled);
bundle.putBoolean(STATE_SHOW_REFRESHING_VIEW, mShowViewWhileRefreshing);
bundle.putParcelable(STATE_SUPER, super.onSaveInstanceState());
return bundle;
}
@Override
protected final void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
// We need to update the header/footer when our size changes
refreshLoadingViewsSize();
// Update the Refreshable View layout
refreshRefreshableViewSize(w, h);
}
/**
* Re-measure the Loading Views height, and adjust internal padding as
* necessary
*/
protected final void refreshLoadingViewsSize() {
final int maximumPullScroll = (int) (getMaximumPullScroll() * 1.2f);
int pLeft = getPaddingLeft();
int pTop = getPaddingTop();
int pRight = getPaddingRight();
int pBottom = getPaddingBottom();
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.setWidth(maximumPullScroll);
pLeft = -maximumPullScroll;
} else {
pLeft = 0;
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.setWidth(maximumPullScroll);
pRight = -maximumPullScroll;
} else {
pRight = 0;
}
break;
case VERTICAL:
if (mMode.showHeaderLoadingLayout()) {
mHeaderLayout.setHeight(maximumPullScroll);
pTop = -maximumPullScroll;
} else {
pTop = 0;
}
if (mMode.showFooterLoadingLayout()) {
mFooterLayout.setHeight(maximumPullScroll);
pBottom = -maximumPullScroll;
} else {
pBottom = 0;
}
break;
}
if (DEBUG) {
Log.d(LOG_TAG, String.format("Setting Padding. L: %d, T: %d, R: %d, B: %d", pLeft, pTop, pRight, pBottom));
}
setPadding(pLeft, pTop, pRight, pBottom);
}
protected final void refreshRefreshableViewSize(int width, int height) {
if (DEBUG) {
Log.d(LOG_TAG, String.format("refreshRefreshableViewSize. W: %d, H: %d", width, height));
}
// We need to set the Height of the Refreshable View to the same as
// this layout
LinearLayout.LayoutParams lp = (LinearLayout.LayoutParams) mRefreshableViewWrapper.getLayoutParams();
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
if (lp.width != width) {
lp.width = width;
mRefreshableViewWrapper.requestLayout();
}
break;
case VERTICAL:
if (lp.height != height) {
lp.height = height;
mRefreshableViewWrapper.requestLayout();
}
break;
}
}
/**
* Helper method which just calls scrollTo() in the correct scrolling
* direction.
*
* @param value - New Scroll value
*/
protected final void setHeaderScroll(final int value) {
if (DEBUG) {
Log.d(LOG_TAG, "setHeaderScroll: " + value);
}
if (mLayoutVisibilityChangesEnabled) {
if (value < 0) {
mHeaderLayout.setVisibility(View.VISIBLE);
} else if (value > 0) {
mFooterLayout.setVisibility(View.VISIBLE);
} else {
mHeaderLayout.setVisibility(View.INVISIBLE);
mFooterLayout.setVisibility(View.INVISIBLE);
}
}
/**
* Use a Hardware Layer on the Refreshable View if we've scrolled at
* all. We don't use them on the Header/Footer Views as they change
* often, which would negate any HW layer performance boost.
*/
ViewCompat.setLayerType(mRefreshableViewWrapper, value != 0 ? View.LAYER_TYPE_HARDWARE : View.LAYER_TYPE_NONE);
switch (getPullToRefreshScrollDirection()) {
case VERTICAL:
scrollTo(0, value);
break;
case HORIZONTAL:
scrollTo(value, 0);
break;
}
}
/**
* Smooth Scroll to position using the default duration of
* {@value #SMOOTH_SCROLL_DURATION_MS} ms.
*
* @param scrollValue - Position to scroll to
*/
protected final void smoothScrollTo(int scrollValue) {
smoothScrollTo(scrollValue, getPullToRefreshScrollDuration());
}
/**
* Smooth Scroll to position using the longer default duration of
* {@value #SMOOTH_SCROLL_LONG_DURATION_MS} ms.
*
* @param scrollValue - Position to scroll to
*/
protected final void smoothScrollToLonger(int scrollValue) {
smoothScrollTo(scrollValue, getPullToRefreshScrollDurationLonger());
}
/**
* Updates the View State when the mode has been set. This does not do any
* checking that the mode is different from the current state, so it always updates.
*/
protected void updateUIForMode() {
// We need to use the correct LayoutParam values, based on scroll
// direction
final LinearLayout.LayoutParams lp = getLoadingLayoutLayoutParams();
// Remove Header, and then add Header Loading View again if needed
if (this == mHeaderLayout.getParent()) {
removeView(mHeaderLayout);
}
if (mMode.showHeaderLoadingLayout()) {
addViewInternal(mHeaderLayout, 0, lp);
}
// Remove Footer, and then add Footer Loading View again if needed
if (this == mFooterLayout.getParent()) {
removeView(mFooterLayout);
}
if (mMode.showFooterLoadingLayout()) {
addViewInternal(mFooterLayout, lp);
}
// Hide Loading Views
refreshLoadingViewsSize();
// If we're not using Mode.BOTH, set mCurrentMode to mMode, otherwise
// set it to pull down
mCurrentMode = (mMode != Mode.BOTH) ? mMode : Mode.PULL_FROM_START;
}
private void addRefreshableView(Context context, T refreshableView) {
mRefreshableViewWrapper = new FrameLayout(context);
mRefreshableViewWrapper.addView(refreshableView, ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT);
addViewInternal(mRefreshableViewWrapper, new LinearLayout.LayoutParams(LayoutParams.MATCH_PARENT,
LayoutParams.MATCH_PARENT));
}
@SuppressWarnings("deprecation")
private void init(Context context, AttributeSet attrs) {
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
setOrientation(LinearLayout.HORIZONTAL);
break;
case VERTICAL:
default:
setOrientation(LinearLayout.VERTICAL);
break;
}
setGravity(Gravity.CENTER);
ViewConfiguration config = ViewConfiguration.get(context);
mTouchSlop = config.getScaledTouchSlop();
// Styleables from XML
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.PullToRefresh);
if (a.hasValue(R.styleable.PullToRefresh_ptrMode)) {
mMode = Mode.mapIntToValue(a.getInteger(R.styleable.PullToRefresh_ptrMode, 0));
}
if (a.hasValue(R.styleable.PullToRefresh_ptrAnimationStyle)) {
mLoadingAnimationStyle = AnimationStyle.mapIntToValue(a.getInteger(
R.styleable.PullToRefresh_ptrAnimationStyle, 0));
}
// Refreshable View
// By passing the attrs, we can add ListView/GridView params via XML
mRefreshableView = createRefreshableView(context, attrs);
addRefreshableView(context, mRefreshableView);
// We need to create the Loading Layouts now
mHeaderLayout = createLoadingLayout(context, Mode.PULL_FROM_START, a);
mFooterLayout = createLoadingLayout(context, Mode.PULL_FROM_END, a);
/**
* Styleables from XML
*/
if (a.hasValue(R.styleable.PullToRefresh_ptrRefreshableViewBackground)) {
Drawable background = a.getDrawable(R.styleable.PullToRefresh_ptrRefreshableViewBackground);
if (null != background) {
mRefreshableView.setBackgroundDrawable(background);
}
} else if (a.hasValue(R.styleable.PullToRefresh_ptrAdapterViewBackground)) {
Utils.warnDeprecation("ptrAdapterViewBackground", "ptrRefreshableViewBackground");
Drawable background = a.getDrawable(R.styleable.PullToRefresh_ptrAdapterViewBackground);
if (null != background) {
mRefreshableView.setBackgroundDrawable(background);
}
}
if (a.hasValue(R.styleable.PullToRefresh_ptrOverScroll)) {
mOverScrollEnabled = a.getBoolean(R.styleable.PullToRefresh_ptrOverScroll, true);
}
if (a.hasValue(R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled)) {
mScrollingWhileRefreshingEnabled = a.getBoolean(
R.styleable.PullToRefresh_ptrScrollingWhileRefreshingEnabled, false);
}
// Let the derivative classes have a go at handling attributes, then
// recycle them...
handleStyledAttributes(a);
a.recycle();
// Finally update the UI for the modes
updateUIForMode();
}
private boolean isReadyForPull() {
switch (mMode) {
case PULL_FROM_START:
return isReadyForPullStart();
case PULL_FROM_END:
return isReadyForPullEnd();
case BOTH:
return isReadyForPullEnd() || isReadyForPullStart();
default:
return false;
}
}
/**
 * Actions a Pull Event: updates the scroll position and, if necessary, the
 * current State based on how far the user has pulled.
 */
private void pullEvent() {
final int newScrollValue;
final int itemDimension;
final float initialMotionValue, lastMotionValue;
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
initialMotionValue = mInitialMotionX;
lastMotionValue = mLastMotionX;
break;
case VERTICAL:
default:
initialMotionValue = mInitialMotionY;
lastMotionValue = mLastMotionY;
break;
}
switch (mCurrentMode) {
case PULL_FROM_END:
newScrollValue = Math.round(Math.max(initialMotionValue - lastMotionValue, 0) / FRICTION);
itemDimension = getFooterSize();
break;
case PULL_FROM_START:
default:
newScrollValue = Math.round(Math.min(initialMotionValue - lastMotionValue, 0) / FRICTION);
itemDimension = getHeaderSize();
break;
}
setHeaderScroll(newScrollValue);
if (newScrollValue != 0 && !isRefreshing()) {
float scale = Math.abs(newScrollValue) / (float) itemDimension;
switch (mCurrentMode) {
case PULL_FROM_END:
mFooterLayout.onPull(scale);
break;
case PULL_FROM_START:
default:
mHeaderLayout.onPull(scale);
break;
}
if (mState != State.PULL_TO_REFRESH && itemDimension >= Math.abs(newScrollValue)) {
setState(State.PULL_TO_REFRESH);
} else if (mState == State.PULL_TO_REFRESH && itemDimension < Math.abs(newScrollValue)) {
setState(State.RELEASE_TO_REFRESH);
}
}
}
private LinearLayout.LayoutParams getLoadingLayoutLayoutParams() {
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
return new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.MATCH_PARENT);
case VERTICAL:
default:
return new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT);
}
}
private int getMaximumPullScroll() {
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
return Math.round(getWidth() / FRICTION);
case VERTICAL:
default:
return Math.round(getHeight() / FRICTION);
}
}
/**
* Smooth Scroll to position using the specific duration
*
* @param scrollValue - Position to scroll to
* @param duration - Duration of animation in milliseconds
*/
private final void smoothScrollTo(int scrollValue, long duration) {
smoothScrollTo(scrollValue, duration, 0, null);
}
private final void smoothScrollTo(int newScrollValue, long duration, long delayMillis,
OnSmoothScrollFinishedListener listener) {
if (null != mCurrentSmoothScrollRunnable) {
mCurrentSmoothScrollRunnable.stop();
}
final int oldScrollValue;
switch (getPullToRefreshScrollDirection()) {
case HORIZONTAL:
oldScrollValue = getScrollX();
break;
case VERTICAL:
default:
oldScrollValue = getScrollY();
break;
}
if (oldScrollValue != newScrollValue) {
if (null == mScrollAnimationInterpolator) {
// Default interpolator is a Decelerate Interpolator
mScrollAnimationInterpolator = new DecelerateInterpolator();
}
mCurrentSmoothScrollRunnable = new SmoothScrollRunnable(oldScrollValue, newScrollValue, duration, listener);
if (delayMillis > 0) {
postDelayed(mCurrentSmoothScrollRunnable, delayMillis);
} else {
post(mCurrentSmoothScrollRunnable);
}
}
}
private final void smoothScrollToAndBack(int y) {
smoothScrollTo(y, SMOOTH_SCROLL_DURATION_MS, 0, new OnSmoothScrollFinishedListener() {
@Override
public void onSmoothScrollFinished() {
smoothScrollTo(0, SMOOTH_SCROLL_DURATION_MS, DEMO_SCROLL_INTERVAL, null);
}
});
}
public static enum AnimationStyle {
/**
* This is the default for Android-PullToRefresh. Allows you to use any
* drawable, which is automatically rotated and used as a Progress Bar.
*/
ROTATE,
/**
* This is the old default, and what is commonly used on iOS. Uses an
* arrow image which flips depending on where the user has scrolled.
*/
FLIP;
static AnimationStyle getDefault() {
return ROTATE;
}
/**
* Maps an int to a specific AnimationStyle. This is needed when saving
* state, or inflating the view from XML where the style is given through
* an attr int.
*
* @param modeInt - int to map an AnimationStyle to
* @return AnimationStyle that modeInt maps to, or ROTATE by default.
*/
static AnimationStyle mapIntToValue(int modeInt) {
switch (modeInt) {
case 0x0:
default:
return ROTATE;
case 0x1:
return FLIP;
}
}
LoadingLayout createLoadingLayout(Context context, Mode mode, Orientation scrollDirection, TypedArray attrs) {
switch (this) {
case ROTATE:
default:
return new RotateLoadingLayout(context, mode, scrollDirection, attrs);
case FLIP:
return new FlipLoadingLayout(context, mode, scrollDirection, attrs);
}
}
}
public static enum Mode {
/**
* Disable all Pull-to-Refresh gesture and Refreshing handling
*/
DISABLED(0x0),
/**
* Only allow the user to Pull from the start of the Refreshable View to
* refresh. The start is either the Top or Left, depending on the
* scrolling direction.
*/
PULL_FROM_START(0x1),
/**
* Only allow the user to Pull from the end of the Refreshable View to
* refresh. The end is either the Bottom or Right, depending on the
* scrolling direction.
*/
PULL_FROM_END(0x2),
/**
* Allow the user to Pull from both the start and the end to refresh.
*/
BOTH(0x3),
/**
* Disables Pull-to-Refresh gesture handling, but allows manually
* setting the Refresh state via
* {@link PullToRefreshBase#setRefreshing() setRefreshing()}.
*/
MANUAL_REFRESH_ONLY(0x4);
/**
* @deprecated Use {@link #PULL_FROM_START} from now on.
*/
public static Mode PULL_DOWN_TO_REFRESH = Mode.PULL_FROM_START;
/**
* @deprecated Use {@link #PULL_FROM_END} from now on.
*/
public static Mode PULL_UP_TO_REFRESH = Mode.PULL_FROM_END;
/**
* Maps an int to a specific mode. This is needed when saving state, or
* inflating the view from XML where the mode is given through an attr
* int.
*
* @param modeInt - int to map a Mode to
* @return Mode that modeInt maps to, or PULL_FROM_START by default.
*/
static Mode mapIntToValue(final int modeInt) {
for (Mode value : Mode.values()) {
if (modeInt == value.getIntValue()) {
return value;
}
}
// If not, return default
return getDefault();
}
static Mode getDefault() {
return PULL_FROM_START;
}
private int mIntValue;
// The modeInt values need to match those from attrs.xml
Mode(int modeInt) {
mIntValue = modeInt;
}
/**
* @return true if the mode permits Pull-to-Refresh
*/
boolean permitsPullToRefresh() {
return !(this == DISABLED || this == MANUAL_REFRESH_ONLY);
}
/**
* @return true if this mode wants the Loading Layout Header to be shown
*/
boolean showHeaderLoadingLayout() {
return this == PULL_FROM_START || this == BOTH;
}
/**
* @return true if this mode wants the Loading Layout Footer to be shown
*/
boolean showFooterLoadingLayout() {
return this == PULL_FROM_END || this == BOTH || this == MANUAL_REFRESH_ONLY;
}
int getIntValue() {
return mIntValue;
}
}
// ===========================================================
// Inner, Anonymous Classes, and Enumerations
// ===========================================================
/**
* Simple Listener that allows you to be notified when the user has scrolled
* to the end of the AdapterView. See
* {@link PullToRefreshAdapterViewBase#setOnLastItemVisibleListener}.
*
* @author Chris Banes
*/
public static interface OnLastItemVisibleListener {
/**
* Called when the user has scrolled to the end of the list
*/
public void onLastItemVisible();
}
/**
* Listener that allows you to be notified when the user has started or
* finished a touch event. Useful when you want to append extra UI events
* (such as sounds). See
* {@link PullToRefreshAdapterViewBase#setOnPullEventListener}.
*
* @author Chris Banes
*/
public static interface OnPullEventListener<V extends View> {
/**
* Called when the internal state has been changed, usually by the user
* pulling.
*
* @param refreshView - View whose state has changed.
* @param state - The new state of View.
* @param direction - One of {@link Mode#PULL_FROM_START} or
* {@link Mode#PULL_FROM_END} depending on which direction
* the user is pulling. Only useful when <var>state</var> is
* {@link State#PULL_TO_REFRESH} or
* {@link State#RELEASE_TO_REFRESH}.
*/
public void onPullEvent(final PullToRefreshBase<V> refreshView, State state, Mode direction);
}
/**
* Simple Listener to listen for any callbacks to Refresh.
*
* @author Chris Banes
*/
public static interface OnRefreshListener<V extends View> {
/**
* onRefresh will be called for both a Pull from start, and Pull from
* end
*/
public void onRefresh(final PullToRefreshBase<V> refreshView);
}
/**
* An advanced version of the Listener to listen for callbacks to Refresh.
* This listener is different as it allows you to differentiate between Pull
* Ups, and Pull Downs.
*
* @author Chris Banes
*/
public static interface OnRefreshListener2<V extends View> {
// TODO These methods need renaming to START/END rather than DOWN/UP
/**
* onPullDownToRefresh will be called only when the user has Pulled from
* the start, and released.
*/
public void onPullDownToRefresh(final PullToRefreshBase<V> refreshView);
/**
* onPullUpToRefresh will be called only when the user has Pulled from
* the end, and released.
*/
public void onPullUpToRefresh(final PullToRefreshBase<V> refreshView);
}
public static enum Orientation {
VERTICAL, HORIZONTAL;
}
public static enum State {
/**
* When the UI is in a state which means that user is not interacting
* with the Pull-to-Refresh function.
*/
RESET(0x0),
/**
* When the UI is being pulled by the user, but has not been pulled far
* enough so that it refreshes when released.
*/
PULL_TO_REFRESH(0x1),
/**
* When the UI is being pulled by the user, and <strong>has</strong>
* been pulled far enough so that it will refresh when released.
*/
RELEASE_TO_REFRESH(0x2),
/**
* When the UI is currently refreshing, caused by a pull gesture.
*/
REFRESHING(0x8),
/**
* When the UI is currently refreshing, caused by a call to
* {@link PullToRefreshBase#setRefreshing() setRefreshing()}.
*/
MANUAL_REFRESHING(0x9),
/**
* When the UI is currently overscrolling, caused by a fling on the
* Refreshable View.
*/
OVERSCROLLING(0x10);
/**
* Maps an int to a specific state. This is needed when saving state.
*
* @param stateInt - int to map a State to
* @return State that stateInt maps to
*/
static State mapIntToValue(final int stateInt) {
for (State value : State.values()) {
if (stateInt == value.getIntValue()) {
return value;
}
}
// If not, return default
return RESET;
}
private int mIntValue;
State(int intValue) {
mIntValue = intValue;
}
int getIntValue() {
return mIntValue;
}
}
final class SmoothScrollRunnable implements Runnable {
private final Interpolator mInterpolator;
private final int mScrollToY;
private final int mScrollFromY;
private final long mDuration;
private OnSmoothScrollFinishedListener mListener;
private boolean mContinueRunning = true;
private long mStartTime = -1;
private int mCurrentY = -1;
public SmoothScrollRunnable(int fromY, int toY, long duration, OnSmoothScrollFinishedListener listener) {
mScrollFromY = fromY;
mScrollToY = toY;
mInterpolator = mScrollAnimationInterpolator;
mDuration = duration;
mListener = listener;
}
@Override
public void run() {
/**
* Only set mStartTime if this is the first time we're starting,
* else actually calculate the Y delta
*/
if (mStartTime == -1) {
mStartTime = System.currentTimeMillis();
} else {
/**
* We do all calculations in long to reduce software float
* calculations. We use 1000 as it gives us good accuracy and
* small rounding errors
*/
long normalizedTime = (1000 * (System.currentTimeMillis() - mStartTime)) / mDuration;
normalizedTime = Math.max(Math.min(normalizedTime, 1000), 0);
final int deltaY = Math.round((mScrollFromY - mScrollToY)
* mInterpolator.getInterpolation(normalizedTime / 1000f));
mCurrentY = mScrollFromY - deltaY;
setHeaderScroll(mCurrentY);
}
// If we're not at the target Y, keep going...
if (mContinueRunning && mScrollToY != mCurrentY) {
ViewCompat.postOnAnimation(PullToRefreshBase.this, this);
} else {
if (null != mListener) {
mListener.onSmoothScrollFinished();
}
}
}
public void stop() {
mContinueRunning = false;
removeCallbacks(this);
}
}
static interface OnSmoothScrollFinishedListener {
void onSmoothScrollFinished();
}
}
| Pretty sure this finally fixes #185.
| library/src/com/handmark/pulltorefresh/library/PullToRefreshBase.java | Pretty sure this finally fixes #185. |
|
Java | bsd-3-clause | cdc7b96dbb8dd8596d4f34d2a5bd2a9d579baead | 0 | runnymederobotics/robot2011 | package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.*;
//Driver joystick
class Driver {
//Buttons
static final int TRANS_TOGGLE_LOW = 7;
static final int TRANS_TOGGLE_HIGH = 8;
static final int ARCADE_TOGGLE = 1;
//Axes
static final int X_AXIS_LEFT = 1;
static final int Y_AXIS_LEFT = 2;
static final int X_AXIS_RIGHT = 3;
static final int Y_AXIS_RIGHT = 4;
}
//Operator joystick
class Operator {
//Buttons
static final int ELEVATOR_STATE_GROUND = 4;
static final int ELEVATOR_STATE_ONE = 11;
static final int ELEVATOR_STATE_TWO = 12;
static final int ELEVATOR_STATE_THREE = 9;
static final int ELEVATOR_STATE_FOUR = 10;
static final int ELEVATOR_STATE_FIVE = 7;
static final int ELEVATOR_STATE_SIX = 8;
static final int ELEVATOR_STATE_FEED = 6;
static final int ELEVATOR_MANUAL_TOGGLE = 5;
static final int GRIPPER_TOGGLE = 1;
static final int MINIBOT_RELEASE_ONE = 5;
static final int MINIBOT_RELEASE_TWO = 6;
static final int LIGHT_SELECTION = 2;
static final int LIGHT_RED = 7;
static final int LIGHT_WHITE = 9;
static final int LIGHT_BLUE = 11;
static final int LIGHT_OFF = 10;
}
//Enumeration of setpoints for different heights of the elevator
class ElevatorSetpoint {
static final double ground = 0;
static final double posOne = 250;
static final double posTwo = 620;
static final double posThree = 2000;
static final double posFour = 2360;
static final double posFive = 3750;
static final double posSix = 3900;
static final double feed = 1865;
}
class ElbowState {
static final int Horizontal = 0;
static final int Middle = 1;
static final int Vertical = 2;
}
//Enumeration of autonomous modes
class AutonomousState {
static final int Driving = 0;
static final int Turning = 1;
static final int Release = 2;
static final int Done = 3;
static final int Sleep = 4;
}
class Lights {
static final int Red = 0;
static final int White = 1;
static final int Blue = 2;
static final int Off = 3;
}
public class RobotTemplate extends IterativeRobot {
//Practise robot or competition robot
static final boolean PRACTISE_ROBOT = false;
//Encoder rate at max speed in slow gear
static final double SLOW_MAX_ENCODER_RATE = 750.0;
//Encoder rate at max speed in fast gear
static final double FAST_MAX_ENCODER_RATE = 1700.0;
//Speed to set the elevator motor to
static final double ELEVATOR_SPEED_UP = 1.0;
static final double ELEVATOR_SPEED_DOWN = 0.75;
//Max drive motor speed
static final double MAX_DRIVE_SPEED = 1.0;
//Encoder counts per metre travelled
static final double COUNTS_PER_METRE = 500;
//Number of elevator encoder counts
static final int MAX_ELEVATOR_COUNTS = 2400;
//Number of seconds to wait in teleoperated mode before the minibot is allowed to be deployed
static final double MINIBOT_RELEASE_TIME = 110.0;
//Number of seconds after the minibot drops before we send it out horizontally
static final double MINIBOT_SERVO_DELAY = 0.5;
//Tolerance for the gyro pid
static final double GYRO_TOLERANCE = 5.0;
//Delay between
static final double AUTONOMOUS_RELEASE_DELAY = 1.0;
//Print delay
static final double PRINT_DELAY = 0.5;
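//Encoder counts to drive forward during autonomous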
static final int AUTONOMOUS_DRIVE_COUNTS = 2500;
//distance in inches for scoring/feeding
static final double MIN_SCORING_DISTANCE = 30.0;
static final double MAX_SCORING_DISTANCE = 36.0;
static final double MIN_FEEDING_DISTANCE = 15.0;
static final double MAX_FEEDING_DISTANCE = 20.0;
static final double FLASH_TIME = 0.25;
static final double ULTRASONIC_VOLTS_PER_INCH = 0.0098;
//Driver station
DriverStation ds = DriverStation.getInstance();
//Joysticks
Joystick stickDriver = new Joystick(1);
Joystick stickOperator = new Joystick(2);
//Compressor, switch is DI 10, spike is relay 1
Compressor compressor = new Compressor(10, 1);
Pneumatic lightsOne;
Pneumatic lightsTwo;
//Solenoids for main robot or practise robot
Pneumatic transShift;
Pneumatic elbowTop;
Pneumatic elbowBottom;
Pneumatic gripper;
Pneumatic minibotRelease;
//Gyro
Gyro gyro = new Gyro(1);
PIDOutputStorage gyroOutput = new PIDOutputStorage();
//Jaguars
Jaguar jagLeft = new Jaguar(1);
Jaguar jagRight = new Jaguar(2);
Jaguar jagElevator = new Jaguar(3);
//Stores output from robotDrive
OutputStorage storageLeft = new OutputStorage();
OutputStorage storageRight = new OutputStorage();
//DI 3 doesn't work
Servo minibotServo = new Servo(4);
//Encoders
PIDEncoder encLeft;
Encoder encNull;
PIDEncoder encElevator;
PIDEncoder encRight;
DigitalInput elevatorLimit = new DigitalInput(8);
DigitalInput minibotLimit = new DigitalInput(7);
DigitalInput rightSensor = new DigitalInput(11);
DigitalInput middleSensor = new DigitalInput(12);
DigitalInput leftSensor = new DigitalInput(13);
AnalogChannel ultrasonicSensor = new AnalogChannel(2);
//Provides drive functions (arcade and tank drive)
RobotDrive robotDrive = new RobotDrive(storageLeft, storageRight);
//PIDs
PIDController pidLeft;
PIDController pidRight;
PIDController pidGyro;
boolean transState;
//Toggle for manual or automated elevator control
Toggle manualElevatorToggle = new Toggle(false);
//Toggle for the gripper
Toggle gripperToggle = new Toggle(false);
//Toggle for arcade/tank drive
Toggle arcadeToggle = new Toggle(true);
Toggle minibotToggle = new Toggle(false);
//State of elbow
int elbowState;
int lastElbowState;
//The elevator setpoint, determined by which button on the operator joystick is pressed
double elevatorSetpoint = ElevatorSetpoint.ground;
//Runs when the robot is turned on
public void robotInit() {
transState = false;
if(!PRACTISE_ROBOT) {
encLeft = new PIDEncoder(true, 3, 4, true);
encNull = new Encoder(9, 14);
encElevator = new PIDEncoder(false, 5, 6, true);
encRight = new PIDEncoder(true, 1, 2, true);
}
else {
encLeft = new PIDEncoder(true, 5, 6, true);
encNull = new Encoder(3, 4);
encElevator = new PIDEncoder(false, 7, 8);
encRight = new PIDEncoder(true, 1, 2, true);
}
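//Velocity PIDs on each drive side (integral-only gains) plus a heading PID on the gyro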
pidLeft = new PIDController(0.0, 0.0005, 0.0, encLeft, jagLeft, 0.005);
pidRight = new PIDController(0.0, 0.0005, 0.0, encRight, jagRight, 0.005);
pidGyro = new PIDController(0.0005, 0.0005, 0.0, gyro, gyroOutput, 0.005);
//Initialize our pneumatics if we are using the practise robot or the real robot
if(!PRACTISE_ROBOT) {
transShift = new Pneumatic(new Solenoid(4));
elbowTop = new Pneumatic(new Solenoid(3));
elbowBottom = new Pneumatic(new Solenoid(2));
gripper = new Pneumatic(new Solenoid(1));
minibotRelease = new Pneumatic(new DoubleSolenoid(6, 7));
lightsOne = new Pneumatic(new Relay(2));
lightsTwo = new Pneumatic(new Relay(3));
}
else {
transShift = new Pneumatic(new Relay(5));
elbowTop = new Pneumatic(new DoubleSolenoid(3, 4));
elbowBottom = new Pneumatic(new DoubleSolenoid(5, 6));
gripper = new Pneumatic(new DoubleSolenoid(1, 2));
minibotRelease = new Pneumatic(new DoubleSolenoid(7, 8));
lightsOne = new Pneumatic(new Relay(6));
lightsTwo = new Pneumatic(new Relay(8));
}
//Start our encoders
encRight.start();
encLeft.start();
encElevator.start();
//Start our elevator encoder at 0
encElevator.reset();
//Input/output range for left encoder/motors
pidLeft.setInputRange(-SLOW_MAX_ENCODER_RATE, SLOW_MAX_ENCODER_RATE);
pidLeft.setOutputRange(-MAX_DRIVE_SPEED, MAX_DRIVE_SPEED);
//Input/output range for right encoder/motors
pidRight.setInputRange(-SLOW_MAX_ENCODER_RATE, SLOW_MAX_ENCODER_RATE);
pidRight.setOutputRange(-MAX_DRIVE_SPEED, MAX_DRIVE_SPEED);
pidGyro.enable();
//Input/output range for the gyro PID
pidGyro.setInputRange(-360.0, 360.0);
pidGyro.setOutputRange(-0.5, 0.5);
//Start the compressor
compressor.start();
}
//Runs at the beginning of disabled period
public void disabledInit() {
//Disable PIDs
pidLeft.disable();
pidRight.disable();
pidGyro.disable();
}
//Runs periodically during disabled period
public void disabledPeriodic() {
//Call our print function with the current mode
print("Disabled");
}
//List of autonomous steps
Step stepList[] = null;
//Iterates through each step
int stepIndex;
boolean doNothing;
boolean trackLine;
boolean heightOne;
boolean heightTwo;
boolean heightThree;
boolean staggeredPeg;
boolean releaseTube;
boolean reverse;
double startPosition;
//Runs at the beginning of autonomous period
public void autonomousInit() {
//Digital/analog inputs
doNothing = ds.getDigitalIn(1);
trackLine = ds.getDigitalIn(2);
heightOne = ds.getDigitalIn(3);
heightTwo = ds.getDigitalIn(4);
heightThree = ds.getDigitalIn(5);
staggeredPeg = ds.getDigitalIn(6);
releaseTube = ds.getDigitalIn(7);
reverse = ds.getDigitalIn(8);
startPosition = ds.getAnalogIn(1);
//Minibot defaults to in
minibotRelease.set(false);
minibotServo.set(0);
manualElevatorToggle.set(false);
gripperToggle.set(false);
arcadeToggle.set(true);
minibotToggle.set(false);
//Default to slow driving mode
transShift.set(!PRACTISE_ROBOT);
//Reset gyro and enable PID on gyro
gyro.reset();
//Enable PID on wheels
pidLeft.enable();
pidRight.enable();
//Reset encoders
encLeft.reset();
encRight.reset();
//Current step
stepIndex = 0;
//Reset the counter for how many times the gyro has reached its setpoint
gyroCounter = 0;
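//Start autonomous with the arm tucked vertical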
setElbow(ElbowState.Vertical);
if(doNothing) {
stepList = new Step[] {
new Step(AutonomousState.Done),
};
}
else {
stepList = new Step[] {
new Step(AutonomousState.Driving, AUTONOMOUS_DRIVE_COUNTS),
new Step(AutonomousState.Release),
new Step(AutonomousState.Driving, reverse ? -AUTONOMOUS_DRIVE_COUNTS * 0.75 : 0),
new Step(AutonomousState.Turning, reverse ? 180 : 0),
new Step(AutonomousState.Done),
};
}
//Determine the setpoint of the elevator
elevatorSetpoint = (heightOne && !staggeredPeg) ? ElevatorSetpoint.posOne : elevatorSetpoint;
elevatorSetpoint = (heightOne && staggeredPeg) ? ElevatorSetpoint.posTwo : elevatorSetpoint;
elevatorSetpoint = (heightTwo && !staggeredPeg) ? ElevatorSetpoint.posThree : elevatorSetpoint;
elevatorSetpoint = (heightTwo && staggeredPeg) ? ElevatorSetpoint.posFour : elevatorSetpoint;
elevatorSetpoint = (heightThree && !staggeredPeg) ? ElevatorSetpoint.posFive : elevatorSetpoint;
elevatorSetpoint = (heightThree && staggeredPeg) ? ElevatorSetpoint.posSix : elevatorSetpoint;
}
//Runs periodically during autonomous period
public void autonomousPeriodic() {
//Call our print function with the current mode
print("Autonomous");
}
static final double DEFAULT_STEERING_GAIN = 0.65;
//Runs continuously during autonomous period
public void autonomousContinuous() {
if(trackLine && !doNothing) {
int binaryValue; // a single binary value of the three line tracking
// sensors
int previousValue = 0; // the binary value from the previous loop
double steeringGain; // the amount of steering correction to apply
// the power profiles for the straight and forked robot path. They are
// different to let the robot drive more slowly as the robot approaches
// the fork on the forked line case.
double forkProfile[] = {0.70, 0.70, 0.55, 0.60, 0.60, 0.50, 0.40, 0.00};
double straightProfile[] = {0.7, 0.7, 0.6, 0.6, 0.35, 0.35, 0.35, 0.0};
double powerProfile[]; // the selected power profile
// set the straightLine and left-right variables depending on chosen path
boolean straightLine = ds.getDigitalIn(1);
powerProfile = (straightLine) ? straightProfile : forkProfile;
double stopTime = (straightLine) ? 2.0 : 4.0; // when the robot should look for end
boolean goLeft = !ds.getDigitalIn(2) && !straightLine;
System.out.println("StraightLine: " + straightLine);
System.out.println("GoingLeft: " + goLeft);
boolean atCross = false; // if robot has arrived at end
// time the path over the line
Timer timer = new Timer();
timer.start();
timer.reset();
int oldTimeInSeconds = -1;
double time;
double speed, turn;
// loop until robot reaches "T" at end or 8 seconds has past
while ((time = timer.get()) < 8.0 && !atCross) {
int timeInSeconds = (int) time;
// read the sensors
int leftValue = leftSensor.get() ? 1 : 0;
int middleValue = middleSensor.get() ? 1 : 0;
int rightValue = rightSensor.get() ? 1 : 0;
// compute the single value from the 3 sensors. Notice that the bits
// for the outside sensors are flipped depending on left or right
// fork. Also the sign of the steering direction is different for left/right.
if (goLeft) {
binaryValue = leftValue * 4 + middleValue * 2 + rightValue;
steeringGain = -DEFAULT_STEERING_GAIN;
} else {
binaryValue = rightValue * 4 + middleValue * 2 + leftValue;
steeringGain = DEFAULT_STEERING_GAIN;
}
// get the default speed and turn rate at this time
speed = powerProfile[timeInSeconds];
turn = 0;
// different cases for different line tracking sensor readings
switch (binaryValue) {
case 1: // on line edge
turn = 0;
break;
case 7: // all sensors on (maybe at cross)
if (time > stopTime) {
atCross = true;
speed = 0;
}
break;
case 0: // all sensors off
if (previousValue == 0 || previousValue == 1) {
turn = steeringGain;
} else {
turn = -steeringGain;
}
break;
default: // all other cases
turn = -steeringGain;
}
// print current status for debugging
if (binaryValue != previousValue) {
System.out.println("Time: " + time + " Sensor: " + binaryValue + " speed: " + speed + " turn: " + turn + " atCross: " + atCross);
}
// set the robot speed and direction
robotDrive.arcadeDrive(speed, turn);
pidLeft.setSetpoint(storageLeft.get() * SLOW_MAX_ENCODER_RATE);
pidRight.setSetpoint(storageRight.get() * SLOW_MAX_ENCODER_RATE);
if (binaryValue != 0) {
previousValue = binaryValue;
}
oldTimeInSeconds = timeInSeconds;
Timer.delay(0.01);
}
// Done with loop - stop the robot. Robot ought to be at the end of the line
pidLeft.setSetpoint(0.0);
pidRight.setSetpoint(0.0);
}
else {
//Our current step in our list of steps
Step currentStep = stepList[stepIndex];
//The last step we did
int lastStepIndex = stepIndex;
//If we have a step to do
if(currentStep != null) {
//Switch the type of step
switch(currentStep.type) {
//If we want to drive forward
case AutonomousState.Driving:
int direction = currentStep.get() > 0 ? 1 : currentStep.get() < 0 ? -1 : 0;
//If we have reached our value for this step on the left or right side
boolean leftDone = false;
boolean rightDone = false;
if(direction == 1) {
final double distance = 100; //Ultrasonic override: fixed large value so only the encoder counts end this drive step (was ultrasonicSensor.getVoltage() / ULTRASONIC_VOLTS_PER_INCH)
leftDone = -encLeft.encoder.get() >= currentStep.get() || distance <= MAX_SCORING_DISTANCE;
rightDone = encRight.encoder.get() >= currentStep.get() || distance <= MAX_SCORING_DISTANCE;
}
else if (direction == -1) {
leftDone = -encLeft.encoder.get() <= currentStep.get();
rightDone = encRight.encoder.get() <= currentStep.get();
if(-encLeft.encoder.get() <= currentStep.get() * 0.25 && encRight.encoder.get() <= currentStep.get() * 0.25)
setElbow(ElbowState.Vertical);
}
//Drive each side until we reach the value for each side
robotDrive.arcadeDrive(direction * 0.85, gyroPID(true, 0.0));
if(!leftDone)
pidLeft.setSetpoint(-storageLeft.get() * SLOW_MAX_ENCODER_RATE);
else
pidLeft.disable();
if(!rightDone)
pidRight.setSetpoint(-storageRight.get() * SLOW_MAX_ENCODER_RATE);
else
pidRight.disable();
//If the value is reached
if(elevatorPID() && leftDone && rightDone)
++stepIndex;
break;
//If we want to turn
case AutonomousState.Turning:
//Disable PIDs for smoother turning
if(pidLeft.isEnable() || pidRight.isEnable()) {
pidLeft.disable();
pidRight.disable();
}
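//The gyro PID turning path below is disabled (if(false)); the gyroPID() helper in the else branch does the turning instead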
if(false) {
//Set the setpoint for the gyro PID to the step's setpoint
pidGyro.setSetpoint(currentStep.get());
//Drive the motors with the output from the gyro PID
jagLeft.set(-gyroOutput.get());
jagRight.set(-gyroOutput.get());
//Difference between our position and our setpoint
final double delta = currentStep.get() - gyro.pidGet();
//If the gyro is below or above the target angle depending on the direction we are turning
if(Math.abs(delta) < GYRO_TOLERANCE)
++gyroCounter;
if(gyroCounter >= 10) {
++stepIndex;
pidLeft.enable();
pidRight.enable();
}
} else {
gyroPID(false, currentStep.get());
}
break;
//To release the tube
case AutonomousState.Release:
if(releaseTube) {
pidLeft.disable();
pidRight.disable();
setElbow(ElbowState.Middle);
Timer.delay(AUTONOMOUS_RELEASE_DELAY);
releaseTube();
elevatorSetpoint = ElevatorSetpoint.ground;
Timer.delay(AUTONOMOUS_RELEASE_DELAY);
}
++stepIndex;
break;
//If we are done our autonomous mode
case AutonomousState.Done:
pidLeft.disable();
pidRight.disable();
break;
//Sleep state
case AutonomousState.Sleep:
double time = currentStep.get();
pidLeft.disable();
pidRight.disable();
while(time > 0) {
print("Autonomous");
Timer.delay(1);
--time;
Watchdog.getInstance().feed();
}
pidLeft.enable();
pidRight.enable();
++stepIndex;
break;
default:
++stepIndex;
break;
}
}
//If we want to go to the next step
if(lastStepIndex != stepIndex) {
//Reset everything
encLeft.reset();
encRight.reset();
gyro.reset();
jagElevator.set(0.0);
//Stop
pidLeft.enable();
pidRight.enable();
pidLeft.setSetpoint(0.0);
pidRight.setSetpoint(0.0);
//Reset gyro counter to 0
gyroCounter = 0;
System.out.println("Step: " + stepIndex);
}
}
}
//Start time for teleoperated mode
double teleopStartTime;
int lightState = Lights.Off;
//Runs at the beginning of teleoperated period
public void teleopInit() {
//Initialize variables
teleopStartTime = Timer.getFPGATimestamp();
//Minibot defaults to in
minibotRelease.set(false);
minibotServo.set(0);
manualElevatorToggle.set(false);
gripperToggle.set(false);
arcadeToggle.set(true);
minibotToggle.set(false);
}
//Runs periodically during teleoperated period
public void teleopPeriodic() {
//Call our print function with the current mode
print("Teleoperated");
}
int lastColor = Lights.Off;
//Runs continuously during teleoperated period
public void teleopContinuous() {
//Don't allow the gyro to be more or less than 360 degrees
if(gyro.pidGet() < -360 || gyro.pidGet() > 360)
gyro.reset();
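//End-game window: true once MINIBOT_RELEASE_TIME seconds of teleop have elapsed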
boolean finale = Timer.getFPGATimestamp() - teleopStartTime >= MINIBOT_RELEASE_TIME;
final double distance = ultrasonicSensor.getVoltage() / ULTRASONIC_VOLTS_PER_INCH;
boolean flashCurrentColor = false;
if(!gripper.get()) { //trying to score
if(distance > MIN_SCORING_DISTANCE && distance < MAX_SCORING_DISTANCE)
flashCurrentColor = true;
}
else if(elevatorSetpoint == ElevatorSetpoint.feed) { //gripper is open and feed position
if(distance > MIN_FEEDING_DISTANCE && distance < MAX_FEEDING_DISTANCE)
flashCurrentColor = true;
}
if(stickOperator.getRawButton(Operator.LIGHT_SELECTION)) {
if(stickOperator.getRawButton(Operator.LIGHT_RED))
lightState = Lights.Red;
if(stickOperator.getRawButton(Operator.LIGHT_WHITE))
lightState = Lights.White;
if(stickOperator.getRawButton(Operator.LIGHT_BLUE))
lightState = Lights.Blue;
if(stickOperator.getRawButton(Operator.LIGHT_OFF))
lightState = Lights.Off;
}
else {
//The elevator setpoint based on the corresponding button
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_GROUND) ? ElevatorSetpoint.ground : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_ONE) ? ElevatorSetpoint.posOne : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_TWO) ? ElevatorSetpoint.posTwo : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_THREE) ? ElevatorSetpoint.posThree : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_FOUR) ? ElevatorSetpoint.posFour : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_FIVE) ? ElevatorSetpoint.posFive : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_SIX) ? ElevatorSetpoint.posSix : elevatorSetpoint;
elevatorSetpoint = !finale /*this is one of the minibot release buttons*/ && stickOperator.getRawButton(Operator.ELEVATOR_STATE_FEED) ? ElevatorSetpoint.feed : elevatorSetpoint;
}
flashLED(finale, flashCurrentColor);
//Disable manual elevator toggle during the finale
manualElevatorToggle.feed(!finale && stickOperator.getRawButton(Operator.ELEVATOR_MANUAL_TOGGLE));
//Manual or automated elevator control
if(manualElevatorToggle.get()) {
double axis = stickOperator.getAxis(Joystick.AxisType.kY);
if(!elevatorLimit.get())
axis = Math.max(axis, 0);
if(axis > 0)
axis *= ELEVATOR_SPEED_DOWN;
jagElevator.set(axis);
} else {
elevatorPID();
}
//Minus because the left encoder is negative
double rate = Math.abs((encRight.pidGet() - encLeft.pidGet()) / 2);
final double LOW_SPEED_PERCENT = 0.9;
final double HIGH_SPEED_PERCENT = 0.6;
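//Automatically shift into high gear near the top of low gear at full stick, and back down when slowed; the driver buttons below override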
if(elbowState == ElbowState.Vertical) {
if(!transState)
transState = rate >= LOW_SPEED_PERCENT * SLOW_MAX_ENCODER_RATE && Math.abs(stickDriver.getRawAxis(Driver.Y_AXIS_LEFT)) >= LOW_SPEED_PERCENT ? true : transState;
else if(transState)
transState = rate <= HIGH_SPEED_PERCENT * FAST_MAX_ENCODER_RATE && Math.abs(stickDriver.getRawAxis(Driver.Y_AXIS_LEFT)) <= HIGH_SPEED_PERCENT ? false : transState;
transState = stickDriver.getRawButton(Driver.TRANS_TOGGLE_LOW) ? false : transState;
transState = stickDriver.getRawButton(Driver.TRANS_TOGGLE_HIGH) ? true : transState;
}
else
transState = false; //Low gear
//Set the transmission shifter to open or closed based on the state of the toggle
transShift.set(PRACTISE_ROBOT ? transState : !transState);
//Determine the input range to use (max encoder rate) to use depending on the transmission state we are in
double maxEncoderRate = transState ? FAST_MAX_ENCODER_RATE : SLOW_MAX_ENCODER_RATE;
pidLeft.setInputRange(-maxEncoderRate, maxEncoderRate);
pidRight.setInputRange(-maxEncoderRate, maxEncoderRate);
//Feed the toggle on the gripper button
gripperToggle.feed(stickOperator.getRawButton(Operator.GRIPPER_TOGGLE));
//Set the gripper to open or closed based on the state of the toggle
if(elbowState != ElbowState.Vertical)
gripper.set(gripperToggle.get());
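//Throttle axis selects the elbow position: below -0.5 horizontal, above 0.5 vertical, otherwise middle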
double elbowInput = -stickOperator.getAxis(Joystick.AxisType.kThrottle);
if(elbowInput < -0.5)
elbowState = ElbowState.Horizontal;
else if(elbowInput > 0.5)
elbowState = ElbowState.Vertical;
else
elbowState = ElbowState.Middle;
setElbow(elbowState);
if(elbowState == ElbowState.Horizontal && lastElbowState == ElbowState.Middle)
gripperToggle.set(true);
lastElbowState = elbowState;
//Feed the toggle on the arcade/tank drive button
//arcadeToggle.feed(stickDriver.getRawButton(Driver.ARCADE_TOGGLE));
final boolean doPID = false;
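//With doPID false the joystick values drive the Jaguars directly and the rate PIDs stay disabled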
//Drive arcade or tank based on the state of the toggle
if(arcadeToggle.get()) {
//If PID is disabled
if((!pidLeft.isEnable() || !pidRight.isEnable()) && doPID) {
//Enable PID
pidLeft.enable();
pidRight.enable();
}
if((pidLeft.isEnable() || pidRight.isEnable()) && !doPID) {
pidLeft.disable();
pidRight.disable();
}
double driveAxis = stickDriver.getRawAxis(Driver.Y_AXIS_LEFT);
driveAxis = Math.abs(driveAxis) < 0.2 ? 0.0 : driveAxis;
double turnAxis = stickDriver.getRawAxis(Driver.X_AXIS_RIGHT);
turnAxis = Math.abs(turnAxis) < 0.2 ? 0.0 : turnAxis;
//Let the robotdrive class calculate arcade drive for us
robotDrive.arcadeDrive(driveAxis, turnAxis);
if(doPID) {
pidLeft.setSetpoint(storageLeft.get() * maxEncoderRate);
pidRight.setSetpoint(storageRight.get() * maxEncoderRate);
}
else {
jagLeft.set(storageLeft.get());
jagRight.set(storageRight.get());
}
}
else {
//If PID is disabled
if((!pidLeft.isEnable() || !pidRight.isEnable()) && doPID) {
//Enable PID
pidLeft.enable();
pidRight.enable();
}
if((pidLeft.isEnable() || pidRight.isEnable()) && !doPID) {
pidLeft.disable();
pidRight.disable();
}
//Left axis
double leftAxis = stickDriver.getRawAxis(Driver.Y_AXIS_LEFT);
//Any value less than 0.2 is set to 0.0 to create a dead zone
leftAxis = Math.abs(leftAxis) < 0.2 ? 0.0 : leftAxis;
//Right axis
double rightAxis = stickDriver.getRawAxis(Driver.Y_AXIS_RIGHT);
//Any value less than 0.2 is set to 0.0 to create a dead zone
rightAxis = Math.abs(rightAxis) < 0.2 ? 0.0 : rightAxis;
if(doPID) {
//Set the setpoint as a percentage of the maximum encoder rate
pidLeft.setSetpoint(leftAxis * maxEncoderRate);
pidRight.setSetpoint(-rightAxis * maxEncoderRate);
}
else {
jagLeft.set(leftAxis);
jagRight.set(-rightAxis);
}
}
//If there are 10 seconds left
if(finale) {
minibotToggle.feed(stickOperator.getRawButton(Operator.MINIBOT_RELEASE_ONE) && stickOperator.getRawButton(Operator.MINIBOT_RELEASE_TWO));
minibotRelease.set(minibotToggle.get());
if(minibotToggle.get() && !minibotLimit.get()) //Minibot limit switch is engaged when false
minibotServo.set(255);
}
}
//Returns whether or not the setpoint has been reached
public boolean elevatorPID() {
//Difference between setpoint and our position
final double error = elevatorSetpoint - encElevator.pidGet();
//We can be off by 5%
final double toleranceWhileGoingUp = MAX_ELEVATOR_COUNTS * 0.05;
final double toleranceWhileGoingDown = -MAX_ELEVATOR_COUNTS * 0.05;
//Go up when below setpoint, down when above setpoint
if(error > 0 && error > toleranceWhileGoingUp)
jagElevator.set(ELEVATOR_SPEED_UP);
else if(error < 0 && error < toleranceWhileGoingDown && elevatorLimit.get()) //Cant go down unless elevator limit is disengaged
jagElevator.set(-ELEVATOR_SPEED_DOWN);
else {
jagElevator.set(0.0);
return true;
}
return false;
}
public void setElbow(int state) {
//Update the elbow state
elbowState = state;
//For the elbow pneumatics, closed = true open = false
//The top elbow is only ever closed in the vertical state
elbowTop.set(elbowState != ElbowState.Vertical);
//The bottom elbow is only ever open in the horizontal state
elbowBottom.set(elbowState == ElbowState.Horizontal);
//If we are vertical then close the gripper
if(elbowState == ElbowState.Vertical) {
gripper.set(false);
gripperToggle.set(false);
}
}
//Number of times our setpoint has been reached
int gyroCounter;
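//Returns a small heading-correction speed when returnSpeed is true; otherwise turns the robot in place toward the target angle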
public double gyroPID(boolean returnSpeed, double target) {
//Use our own calculations to get to the setpoint of the gyro
final double delta = target - gyro.getAngle();
//For straight driving in autonomous mode
if(returnSpeed) {
if(Math.abs(delta) < GYRO_TOLERANCE)
++gyroCounter;
if(gyroCounter >= 100) {
gyroCounter = 0;
return 0.0;
}
//The speed is incorporated into straight driving so it has to be low
final double speed = 0.1;
return delta > 0 ? -speed : speed;
}
//For turning on the spot
else {
if(Math.abs(delta) < GYRO_TOLERANCE)
++gyroCounter;
if(gyroCounter >= 100)
++stepIndex;
//We are turning on the spot so the turning speed is high
final double speed = 0.85;
jagLeft.set(delta >= 0 ? -speed : speed);
jagRight.set(delta >= 0 ? -speed : speed);
return 0.0;
}
}
public void releaseTube() {
setElbow(ElbowState.Horizontal);
gripper.set(true);
//Toggle the gripper to be open at the beginning of teleop
gripperToggle.set(true);
}
double flashTime = 0;
boolean flash = false;
public void flashLED(boolean finale, boolean flashingColor) {
double now = Timer.getFPGATimestamp();
if(now - flashTime > FLASH_TIME) {
flash = !flash;
flashTime = now;
}
if(finale) {
if(flash) {
lightsOne.set(false);
lightsTwo.set(true);
}
else {
lightsOne.set(true);
lightsTwo.set(false);
}
}
else {
switch(lightState) {
case Lights.Red:
lightsOne.set(true);
if(flashingColor && !flash)
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
break;
case Lights.White:
lightsOne.set(false);
if(flashingColor && !flash)
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
break;
case Lights.Blue:
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.set(true);
if(flashingColor && !flash)
lightsTwo.relay.set(Relay.Value.kOff);
break;
case Lights.Off:
//If the lights are off but we want to flash (distance sensor) default to red light
if(flashingColor) {
lightsOne.set(true);
if(!flash)
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
}
else {
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
}
break;
default:
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
break;
}
if(transState) {
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.set(false);
}
}
}
double lastPrintTime = 0;
//Print function for our variables
public void print(String mode) {
//Current time
final double curPrintTime = Timer.getFPGATimestamp();
//If it has been more than half a second
if(curPrintTime - lastPrintTime > PRINT_DELAY) {
//Make a bunch of newlines to clear the screen to only show the current output
System.out.println("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
//Print statements
System.out.println("[" + mode + "]");
System.out.println("gripperToggle: " + gripperToggle.get());
System.out.println("renc count: " + encRight.encoder.get() + " lenc count: " + encLeft.encoder.get() + " elevator counts: " + encElevator.pidGet());
System.out.println("rencRate: " + encRight.pidGet() + " lencRate: " + encLeft.pidGet());
System.out.println("rSet: " + pidRight.getSetpoint() + " lSet: " + pidLeft.getSetpoint() + " eSet: " + elevatorSetpoint);
System.out.println("rPID: " + pidRight.get() + " lPID: " + pidLeft.get());
System.out.println("manualElevator: " + manualElevatorToggle.get());
System.out.println("elevAxis: " + stickOperator.getAxis(Joystick.AxisType.kY) + " leftAxis: " + stickDriver.getRawAxis(Driver.Y_AXIS_LEFT) + " rightAxis: " + stickDriver.getRawAxis(Driver.Y_AXIS_RIGHT));
System.out.println("Gyro PIDget: " + gyro.pidGet() + " gyro output storage: " + gyroOutput.get());
System.out.println("jagLeft: " + jagLeft.get() + " jagRight: " + jagRight.get());
System.out.println("elbow input: " + stickOperator.getThrottle() + "elbowState: " + elbowState);
//System.out.println("Raven gyro min: " + gyro.min + " max: " + gyro.max + " deadzone: " + gyro.deadzone + " center: " + gyro.center);
System.out.println("rightSensor: " + rightSensor.get() + " middleSensor: " + middleSensor.get() + " leftSensor: " + leftSensor.get());
System.out.println("light: " + lightState);
System.out.println("ultrasonic distance: " + ultrasonicSensor.getVoltage() / ULTRASONIC_VOLTS_PER_INCH);
System.out.println("limitelev: " + elevatorLimit.get() + " minibotlimit: " + minibotLimit.get());
System.out.println("minibotRelease: " + minibotToggle.get());
//Update the last print time
lastPrintTime = curPrintTime;
}
}
}
| src/edu/wpi/first/wpilibj/templates/RobotTemplate.java | package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.*;
//Driver joystick
class Driver {
//Buttons
static final int TRANS_TOGGLE_LOW = 7;
static final int TRANS_TOGGLE_HIGH = 8;
static final int ARCADE_TOGGLE = 1;
//Axes
static final int X_AXIS_LEFT = 1;
static final int Y_AXIS_LEFT = 2;
static final int X_AXIS_RIGHT = 3;
static final int Y_AXIS_RIGHT = 4;
}
//Operator joystick
class Operator {
//Buttons
static final int ELEVATOR_STATE_GROUND = 4;
static final int ELEVATOR_STATE_ONE = 11;
static final int ELEVATOR_STATE_TWO = 12;
static final int ELEVATOR_STATE_THREE = 9;
static final int ELEVATOR_STATE_FOUR = 10;
static final int ELEVATOR_STATE_FIVE = 7;
static final int ELEVATOR_STATE_SIX = 8;
static final int ELEVATOR_STATE_FEED = 6;
static final int ELEVATOR_MANUAL_TOGGLE = 5;
static final int GRIPPER_TOGGLE = 1;
static final int MINIBOT_RELEASE_ONE = 5;
static final int MINIBOT_RELEASE_TWO = 6;
static final int LIGHT_SELECTION = 2;
static final int LIGHT_RED = 7;
static final int LIGHT_WHITE = 9;
static final int LIGHT_BLUE = 11;
static final int LIGHT_OFF = 10;
}
//Enumeration of setpoints for different heights of the elevator
class ElevatorSetpoint {
static final double ground = 0;
static final double posOne = 250;
static final double posTwo = 620;
static final double posThree = 2000;
static final double posFour = 2360;
static final double posFive = 3750;
static final double posSix = 3900;
static final double feed = 1865;
}
class ElbowState {
static final int Horizontal = 0;
static final int Middle = 1;
static final int Vertical = 2;
}
//Enumeration of autonomous modes
class AutonomousState {
static final int Driving = 0;
static final int Turning = 1;
static final int Release = 2;
static final int Done = 3;
static final int Sleep = 4;
}
class Lights {
static final int Red = 0;
static final int White = 1;
static final int Blue = 2;
static final int Off = 3;
}
public class RobotTemplate extends IterativeRobot {
//Practise robot or competition robot
static final boolean PRACTISE_ROBOT = false;
//Encoder rate at max speed in slow gear
static final double SLOW_MAX_ENCODER_RATE = 750.0;
//Encoder rate at max speed in fast gear
static final double FAST_MAX_ENCODER_RATE = 1700.0;
//Speed to set the elevator motor to
static final double ELEVATOR_SPEED_UP = 1.0;
static final double ELEVATOR_SPEED_DOWN = 0.75;
//Max drive motor speed
static final double MAX_DRIVE_SPEED = 1.0;
//Encoder counts per metre travelled
static final double COUNTS_PER_METRE = 500;
//Number of elevator encoder counts
static final int MAX_ELEVATOR_COUNTS = 2400;
//Number of seconds to wait in teleoperated mode before the minibot is allowed to be deployed
static final double MINIBOT_RELEASE_TIME = 110.0;
//Number of seconds after the minibot drops before we send it out horizontally
static final double MINIBOT_SERVO_DELAY = 0.5;
//Tolerance for the gyro pid
static final double GYRO_TOLERANCE = 5.0;
//Delay in seconds between the elbow and gripper actions when releasing the tube in autonomous
static final double AUTONOMOUS_RELEASE_DELAY = 0.5;
//Print delay
static final double PRINT_DELAY = 0.5;
static final int AUTONOMOUS_DRIVE_COUNTS = 2600;
//distance in inches for scoring/feeding
static final double MIN_SCORING_DISTANCE = 30.0;
static final double MAX_SCORING_DISTANCE = 36.0;
static final double MIN_FEEDING_DISTANCE = 15.0;
static final double MAX_FEEDING_DISTANCE = 20.0;
static final double FLASH_TIME = 0.25;
static final double ULTRASONIC_VOLTS_PER_INCH = 0.0098;
//Driver station
DriverStation ds = DriverStation.getInstance();
//Joysticks
Joystick stickDriver = new Joystick(1);
Joystick stickOperator = new Joystick(2);
//Compressor, switch is DI 10, spike is relay 1
Compressor compressor = new Compressor(10, 1);
Pneumatic lightsOne;
Pneumatic lightsTwo;
//Solenoids for main robot or practise robot
Pneumatic transShift;
Pneumatic elbowTop;
Pneumatic elbowBottom;
Pneumatic gripper;
Pneumatic minibotRelease;
//Gyro
Gyro gyro = new Gyro(1);
PIDOutputStorage gyroOutput = new PIDOutputStorage();
//Jaguars
Jaguar jagLeft = new Jaguar(1);
Jaguar jagRight = new Jaguar(2);
Jaguar jagElevator = new Jaguar(3);
//Stores output from robotDrive
OutputStorage storageLeft = new OutputStorage();
OutputStorage storageRight = new OutputStorage();
//DI 3 doesn't work
Servo minibotServo = new Servo(4);
//Encoders
PIDEncoder encLeft;
Encoder encNull;
PIDEncoder encElevator;
PIDEncoder encRight;
DigitalInput elevatorLimit = new DigitalInput(8);
DigitalInput minibotLimit = new DigitalInput(7);
DigitalInput rightSensor = new DigitalInput(11);
DigitalInput middleSensor = new DigitalInput(12);
DigitalInput leftSensor = new DigitalInput(13);
AnalogChannel ultrasonicSensor = new AnalogChannel(2);
//Provides drive functions (arcade and tank drive)
RobotDrive robotDrive = new RobotDrive(storageLeft, storageRight);
//PIDs
PIDController pidLeft;
PIDController pidRight;
PIDController pidGyro;
boolean transState;
//Toggle for manual or automated elevator control
Toggle manualElevatorToggle = new Toggle(false);
//Toggle for the gripper
Toggle gripperToggle = new Toggle(false);
//Toggle for arcade/tank drive
Toggle arcadeToggle = new Toggle(true);
Toggle minibotToggle = new Toggle(false);
//State of elbow
int elbowState;
int lastElbowState;
//The elevator setpoint, determined by which button on the operator joystick is pressed
double elevatorSetpoint = ElevatorSetpoint.ground;
//Runs when the robot is turned on
public void robotInit() {
transState = false;
if(!PRACTISE_ROBOT) {
encLeft = new PIDEncoder(true, 3, 4, true);
encNull = new Encoder(9, 14);
encElevator = new PIDEncoder(false, 5, 6, true);
encRight = new PIDEncoder(true, 1, 2, true);
}
else {
encLeft = new PIDEncoder(true, 5, 6, true);
encNull = new Encoder(3, 4);
encElevator = new PIDEncoder(false, 7, 8);
encRight = new PIDEncoder(true, 1, 2, true);
}
pidLeft = new PIDController(0.0, 0.0005, 0.0, encLeft, jagLeft, 0.005);
pidRight = new PIDController(0.0, 0.0005, 0.0, encRight, jagRight, 0.005);
pidGyro = new PIDController(0.0005, 0.0005, 0.0, gyro, gyroOutput, 0.005);
//Initialize our pneumatics if we are using the practise robot or the real robot
if(!PRACTISE_ROBOT) {
transShift = new Pneumatic(new Solenoid(4));
elbowTop = new Pneumatic(new Solenoid(3));
elbowBottom = new Pneumatic(new Solenoid(2));
gripper = new Pneumatic(new Solenoid(1));
minibotRelease = new Pneumatic(new DoubleSolenoid(6, 7));
lightsOne = new Pneumatic(new Relay(2));
lightsTwo = new Pneumatic(new Relay(3));
}
else {
transShift = new Pneumatic(new Relay(5));
elbowTop = new Pneumatic(new DoubleSolenoid(3, 4));
elbowBottom = new Pneumatic(new DoubleSolenoid(5, 6));
gripper = new Pneumatic(new DoubleSolenoid(1, 2));
minibotRelease = new Pneumatic(new DoubleSolenoid(7, 8));
lightsOne = new Pneumatic(new Relay(6));
lightsTwo = new Pneumatic(new Relay(8));
}
//Start our encoders
encRight.start();
encLeft.start();
encElevator.start();
//Start our elevator encoder at 0
encElevator.reset();
//Input/output range for left encoder/motors
pidLeft.setInputRange(-SLOW_MAX_ENCODER_RATE, SLOW_MAX_ENCODER_RATE);
pidLeft.setOutputRange(-MAX_DRIVE_SPEED, MAX_DRIVE_SPEED);
//Input/output range for right encoder/motors
pidRight.setInputRange(-SLOW_MAX_ENCODER_RATE, SLOW_MAX_ENCODER_RATE);
pidRight.setOutputRange(-MAX_DRIVE_SPEED, MAX_DRIVE_SPEED);
pidGyro.enable();
//Input/output range for the gyro PID
pidGyro.setInputRange(-360.0, 360.0);
pidGyro.setOutputRange(-0.5, 0.5);
//Start the compressor
compressor.start();
}
//Runs at the beginning of disabled period
public void disabledInit() {
//Disable PIDs
pidLeft.disable();
pidRight.disable();
pidGyro.disable();
}
//Runs periodically during disabled period
public void disabledPeriodic() {
//Call our print function with the current mode
print("Disabled");
}
//List of autonomous steps
Step stepList[] = null;
//Iterates through each step
int stepIndex;
boolean doNothing;
boolean trackLine;
boolean heightOne;
boolean heightTwo;
boolean heightThree;
boolean staggeredPeg;
boolean releaseTube;
boolean reverse;
double startPosition;
//Runs at the beginning of autonomous period
public void autonomousInit() {
//Digital/analog inputs
doNothing = ds.getDigitalIn(1);
trackLine = ds.getDigitalIn(2);
heightOne = ds.getDigitalIn(3);
heightTwo = ds.getDigitalIn(4);
heightThree = ds.getDigitalIn(5);
staggeredPeg = ds.getDigitalIn(6);
releaseTube = ds.getDigitalIn(7);
reverse = ds.getDigitalIn(8);
startPosition = ds.getAnalogIn(1);
//Minibot defaults to in
minibotRelease.set(false);
minibotServo.set(0);
manualElevatorToggle.set(false);
gripperToggle.set(false);
arcadeToggle.set(true);
minibotToggle.set(false);
//Default to slow driving mode
transShift.set(!PRACTISE_ROBOT);
//Reset gyro and enable PID on gyro
gyro.reset();
//Enable PID on wheels
pidLeft.enable();
pidRight.enable();
//Reset encoders
encLeft.reset();
encRight.reset();
//Current step
stepIndex = 0;
//Reset the counter for how many times the gyro has reached its setpoint
gyroCounter = 0;
setElbow(ElbowState.Vertical);
if(doNothing) {
stepList = new Step[] {
new Step(AutonomousState.Done),
};
}
else {
stepList = new Step[] {
new Step(AutonomousState.Driving, AUTONOMOUS_DRIVE_COUNTS),
new Step(AutonomousState.Release),
new Step(AutonomousState.Driving, reverse ? -AUTONOMOUS_DRIVE_COUNTS * 0.75 : 0),
new Step(AutonomousState.Turning, reverse ? 180 : 0),
new Step(AutonomousState.Done),
};
}
//Determine the setpoint of the elevator
elevatorSetpoint = (heightOne && !staggeredPeg) ? ElevatorSetpoint.posOne : elevatorSetpoint;
elevatorSetpoint = (heightOne && staggeredPeg) ? ElevatorSetpoint.posTwo : elevatorSetpoint;
elevatorSetpoint = (heightTwo && !staggeredPeg) ? ElevatorSetpoint.posThree : elevatorSetpoint;
elevatorSetpoint = (heightTwo && staggeredPeg) ? ElevatorSetpoint.posFour : elevatorSetpoint;
elevatorSetpoint = (heightThree && !staggeredPeg) ? ElevatorSetpoint.posFive : elevatorSetpoint;
elevatorSetpoint = (heightThree && staggeredPeg) ? ElevatorSetpoint.posSix : elevatorSetpoint;
}
//Runs periodically during autonomous period
public void autonomousPeriodic() {
//Call our print function with the current mode
print("Autonomous");
}
static final double DEFAULT_STEERING_GAIN = 0.65;
//Runs continuously during autonomous period
public void autonomousContinuous() {
if(trackLine && !doNothing) {
int binaryValue; // a single binary value of the three line tracking
// sensors
int previousValue = 0; // the binary value from the previous loop
double steeringGain; // the amount of steering correction to apply
// the power profiles for the straight and forked robot path. They are
// different to let the robot drive more slowly as the robot approaches
// the fork on the forked line case.
double forkProfile[] = {0.70, 0.70, 0.55, 0.60, 0.60, 0.50, 0.40, 0.00};
double straightProfile[] = {0.7, 0.7, 0.6, 0.6, 0.35, 0.35, 0.35, 0.0};
double powerProfile[]; // the selected power profile
// set the straightLine and left-right variables depending on chosen path
boolean straightLine = ds.getDigitalIn(1);
powerProfile = (straightLine) ? straightProfile : forkProfile;
double stopTime = (straightLine) ? 2.0 : 4.0; // when the robot should look for end
boolean goLeft = !ds.getDigitalIn(2) && !straightLine;
System.out.println("StraightLine: " + straightLine);
System.out.println("GoingLeft: " + goLeft);
boolean atCross = false; // if robot has arrived at end
// time the path over the line
Timer timer = new Timer();
timer.start();
timer.reset();
int oldTimeInSeconds = -1;
double time;
double speed, turn;
// loop until robot reaches "T" at end or 8 seconds has past
while ((time = timer.get()) < 8.0 && !atCross) {
int timeInSeconds = (int) time;
// read the sensors
int leftValue = leftSensor.get() ? 1 : 0;
int middleValue = middleSensor.get() ? 1 : 0;
int rightValue = rightSensor.get() ? 1 : 0;
// compute the single value from the 3 sensors. Notice that the bits
// for the outside sensors are flipped depending on left or right
// fork. Also the sign of the steering direction is different for left/right.
if (goLeft) {
binaryValue = leftValue * 4 + middleValue * 2 + rightValue;
steeringGain = -DEFAULT_STEERING_GAIN;
} else {
binaryValue = rightValue * 4 + middleValue * 2 + leftValue;
steeringGain = DEFAULT_STEERING_GAIN;
}
// get the default speed and turn rate at this time
speed = powerProfile[timeInSeconds];
turn = 0;
// different cases for different line tracking sensor readings
switch (binaryValue) {
case 1: // on line edge
turn = 0;
break;
case 7: // all sensors on (maybe at cross)
if (time > stopTime) {
atCross = true;
speed = 0;
}
break;
case 0: // all sensors off
if (previousValue == 0 || previousValue == 1) {
turn = steeringGain;
} else {
turn = -steeringGain;
}
break;
default: // all other cases
turn = -steeringGain;
}
// print current status for debugging
if (binaryValue != previousValue) {
System.out.println("Time: " + time + " Sensor: " + binaryValue + " speed: " + speed + " turn: " + turn + " atCross: " + atCross);
}
// set the robot speed and direction
robotDrive.arcadeDrive(speed, turn);
pidLeft.setSetpoint(storageLeft.get() * SLOW_MAX_ENCODER_RATE);
pidRight.setSetpoint(storageRight.get() * SLOW_MAX_ENCODER_RATE);
if (binaryValue != 0) {
previousValue = binaryValue;
}
oldTimeInSeconds = timeInSeconds;
Timer.delay(0.01);
}
// Done with loop - stop the robot. Robot ought to be at the end of the line
pidLeft.setSetpoint(0.0);
pidRight.setSetpoint(0.0);
}
else {
//Our current step in our list of steps
Step currentStep = stepList[stepIndex];
//The last step we did
int lastStepIndex = stepIndex;
//If we have a step to do
if(currentStep != null) {
//Switch the type of step
switch(currentStep.type) {
//If we want to drive forward
case AutonomousState.Driving:
int direction = currentStep.get() > 0 ? 1 : currentStep.get() < 0 ? -1 : 0;
//If we have reached our value for this step on the left or right side
boolean leftDone = false;
boolean rightDone = false;
if(direction == 1) {
final double distance = ultrasonicSensor.getVoltage() / ULTRASONIC_VOLTS_PER_INCH;
leftDone = -encLeft.encoder.get() >= currentStep.get() || distance <= MAX_SCORING_DISTANCE;
rightDone = encRight.encoder.get() >= currentStep.get() || distance <= MAX_SCORING_DISTANCE;
}
else if (direction == -1) {
leftDone = -encLeft.encoder.get() <= currentStep.get();
rightDone = encRight.encoder.get() <= currentStep.get();
if(-encLeft.encoder.get() <= currentStep.get() * 0.25 && encRight.encoder.get() <= currentStep.get() * 0.25)
setElbow(ElbowState.Vertical);
}
//Drive each side until we reach the value for each side
robotDrive.arcadeDrive(direction * 0.85, gyroPID(true, 0.0));
if(!leftDone)
pidLeft.setSetpoint(-storageLeft.get() * SLOW_MAX_ENCODER_RATE);
else
pidLeft.disable();
if(!rightDone)
pidRight.setSetpoint(-storageRight.get() * SLOW_MAX_ENCODER_RATE);
else
pidRight.disable();
//If the value is reached
if(elevatorPID() && leftDone && rightDone)
++stepIndex;
break;
//If we want to turn
case AutonomousState.Turning:
//Disable PIDs for smoother turning
if(pidLeft.isEnable() || pidRight.isEnable()) {
pidLeft.disable();
pidRight.disable();
}
if(false) {
//Set the setpoint for the gyro PID to the step's setpoint
pidGyro.setSetpoint(currentStep.get());
//Drive the motors with the output from the gyro PID
jagLeft.set(-gyroOutput.get());
jagRight.set(-gyroOutput.get());
//Difference between our position and our setpoint
final double delta = currentStep.get() - gyro.pidGet();
//If the gyro is below or above the target angle depending on the direction we are turning
if(Math.abs(delta) < GYRO_TOLERANCE)
++gyroCounter;
if(gyroCounter >= 10) {
++stepIndex;
pidLeft.enable();
pidRight.enable();
}
} else {
gyroPID(false, currentStep.get());
}
break;
//To release the tube
case AutonomousState.Release:
if(releaseTube) {
pidLeft.disable();
pidRight.disable();
setElbow(ElbowState.Middle);
Timer.delay(AUTONOMOUS_RELEASE_DELAY);
releaseTube();
elevatorSetpoint = ElevatorSetpoint.ground;
}
++stepIndex;
break;
//If we are done our autonomous mode
case AutonomousState.Done:
pidLeft.disable();
pidRight.disable();
break;
//Sleep state
case AutonomousState.Sleep:
double time = currentStep.get();
pidLeft.disable();
pidRight.disable();
while(time > 0) {
print("Autonomous");
Timer.delay(1);
--time;
Watchdog.getInstance().feed();
}
pidLeft.enable();
pidRight.enable();
++stepIndex;
break;
default:
++stepIndex;
break;
}
}
//If we want to go to the next step
if(lastStepIndex != stepIndex) {
//Reset everything
encLeft.reset();
encRight.reset();
gyro.reset();
jagElevator.set(0.0);
//Stop
pidLeft.enable();
pidRight.enable();
pidLeft.setSetpoint(0.0);
pidRight.setSetpoint(0.0);
//Reset gyro counter to 0
gyroCounter = 0;
System.out.println("Step: " + stepIndex);
}
}
}
//Start time for teleoperated mode
double teleopStartTime;
int lightState = Lights.Off;
//Runs at the beginning of teleoperated period
public void teleopInit() {
//Initialize variables
teleopStartTime = Timer.getFPGATimestamp();
//Minibot defaults to in
minibotRelease.set(false);
minibotServo.set(0);
manualElevatorToggle.set(false);
gripperToggle.set(false);
arcadeToggle.set(true);
minibotToggle.set(false);
}
//Runs periodically during teleoperated period
public void teleopPeriodic() {
//Call our print function with the current mode
print("Teleoperated");
}
int lastColor = Lights.Off;
//Runs continuously during teleoperated period
public void teleopContinuous() {
//Don't allow the gyro to be more or less than 360 degrees
if(gyro.pidGet() < -360 || gyro.pidGet() > 360)
gyro.reset();
boolean finale = Timer.getFPGATimestamp() - teleopStartTime >= MINIBOT_RELEASE_TIME;
final double distance = ultrasonicSensor.getVoltage() / ULTRASONIC_VOLTS_PER_INCH;
boolean flashCurrentColor = false;
if(!gripper.get()) { //trying to score
if(distance > MIN_SCORING_DISTANCE && distance < MAX_SCORING_DISTANCE)
flashCurrentColor = true;
}
else if(elevatorSetpoint == ElevatorSetpoint.feed) { //gripper is open and feed position
if(distance > MIN_FEEDING_DISTANCE && distance < MAX_FEEDING_DISTANCE)
flashCurrentColor = true;
}
if(stickOperator.getRawButton(Operator.LIGHT_SELECTION)) {
if(stickOperator.getRawButton(Operator.LIGHT_RED))
lightState = Lights.Red;
if(stickOperator.getRawButton(Operator.LIGHT_WHITE))
lightState = Lights.White;
if(stickOperator.getRawButton(Operator.LIGHT_BLUE))
lightState = Lights.Blue;
if(stickOperator.getRawButton(Operator.LIGHT_OFF))
lightState = Lights.Off;
}
else {
//The elevator setpoint based on the corresponding button
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_GROUND) ? ElevatorSetpoint.ground : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_ONE) ? ElevatorSetpoint.posOne : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_TWO) ? ElevatorSetpoint.posTwo : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_THREE) ? ElevatorSetpoint.posThree : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_FOUR) ? ElevatorSetpoint.posFour : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_FIVE) ? ElevatorSetpoint.posFive : elevatorSetpoint;
elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_SIX) ? ElevatorSetpoint.posSix : elevatorSetpoint;
elevatorSetpoint = !finale /*this is one of the minibot release buttons*/ && stickOperator.getRawButton(Operator.ELEVATOR_STATE_FEED) ? ElevatorSetpoint.feed : elevatorSetpoint;
}
flashLED(finale, flashCurrentColor);
//Disable manual elevator toggle during the finale
manualElevatorToggle.feed(!finale && stickOperator.getRawButton(Operator.ELEVATOR_MANUAL_TOGGLE));
//Manual or automated elevator control
if(manualElevatorToggle.get()) {
double axis = stickOperator.getAxis(Joystick.AxisType.kY);
if(!elevatorLimit.get())
axis = Math.max(axis, 0);
if(axis > 0)
axis *= ELEVATOR_SPEED_DOWN;
jagElevator.set(axis);
} else {
elevatorPID();
}
//Minus because the left encoder is negative
double rate = Math.abs((encRight.pidGet() - encLeft.pidGet()) / 2);
final double LOW_SPEED_PERCENT = 0.9;
final double HIGH_SPEED_PERCENT = 0.6;
if(elbowState == ElbowState.Vertical) {
if(!transState)
transState = rate >= LOW_SPEED_PERCENT * SLOW_MAX_ENCODER_RATE && Math.abs(stickDriver.getRawAxis(Driver.Y_AXIS_LEFT)) >= LOW_SPEED_PERCENT ? true : transState;
else if(transState)
transState = rate <= HIGH_SPEED_PERCENT * FAST_MAX_ENCODER_RATE && Math.abs(stickDriver.getRawAxis(Driver.Y_AXIS_LEFT)) <= HIGH_SPEED_PERCENT ? false : transState;
transState = stickDriver.getRawButton(Driver.TRANS_TOGGLE_LOW) ? false : transState;
transState = stickDriver.getRawButton(Driver.TRANS_TOGGLE_HIGH) ? true : transState;
}
else
transState = false; //Low gear
//Set the transmission shifter to open or closed based on the state of the toggle
transShift.set(PRACTISE_ROBOT ? transState : !transState);
//Determine the input range to use (max encoder rate) to use depending on the transmission state we are in
double maxEncoderRate = transState ? FAST_MAX_ENCODER_RATE : SLOW_MAX_ENCODER_RATE;
pidLeft.setInputRange(-maxEncoderRate, maxEncoderRate);
pidRight.setInputRange(-maxEncoderRate, maxEncoderRate);
//Feed the toggle on the gripper button
gripperToggle.feed(stickOperator.getRawButton(Operator.GRIPPER_TOGGLE));
//Set the gripper to open or closed based on the state of the toggle
if(elbowState != ElbowState.Vertical)
gripper.set(gripperToggle.get());
double elbowInput = -stickOperator.getAxis(Joystick.AxisType.kThrottle);
if(elbowInput < -0.5)
elbowState = ElbowState.Horizontal;
else if(elbowInput > 0.5)
elbowState = ElbowState.Vertical;
else
elbowState = ElbowState.Middle;
setElbow(elbowState);
if(elbowState == ElbowState.Horizontal && lastElbowState == ElbowState.Middle)
gripperToggle.set(true);
lastElbowState = elbowState;
//Feed the toggle on the arcade/tank drive button
//arcadeToggle.feed(stickDriver.getRawButton(Driver.ARCADE_TOGGLE));
final boolean doPID = false;
//Drive arcade or tank based on the state of the toggle
if(arcadeToggle.get()) {
//If PID is disabled
if((!pidLeft.isEnable() || !pidRight.isEnable()) && doPID) {
//Enable PID
pidLeft.enable();
pidRight.enable();
}
if((pidLeft.isEnable() || pidRight.isEnable()) && !doPID) {
pidLeft.disable();
pidRight.disable();
}
double driveAxis = stickDriver.getRawAxis(Driver.Y_AXIS_LEFT);
driveAxis = Math.abs(driveAxis) < 0.2 ? 0.0 : driveAxis;
double turnAxis = stickDriver.getRawAxis(Driver.X_AXIS_RIGHT);
turnAxis = Math.abs(turnAxis) < 0.2 ? 0.0 : turnAxis;
//Let the robotdrive class calculate arcade drive for us
robotDrive.arcadeDrive(driveAxis, turnAxis);
if(doPID) {
pidLeft.setSetpoint(storageLeft.get() * maxEncoderRate);
pidRight.setSetpoint(storageRight.get() * maxEncoderRate);
}
else {
jagLeft.set(storageLeft.get());
jagRight.set(storageRight.get());
}
}
else {
//If PID is disabled
if((!pidLeft.isEnable() || !pidRight.isEnable()) && doPID) {
//Enable PID
pidLeft.enable();
pidRight.enable();
}
if((pidLeft.isEnable() || pidRight.isEnable()) && !doPID) {
pidLeft.disable();
pidRight.disable();
}
//Left axis
double leftAxis = stickDriver.getRawAxis(Driver.Y_AXIS_LEFT);
//Any value less than 0.2 is set to 0.0 to create a dead zone
leftAxis = Math.abs(leftAxis) < 0.2 ? 0.0 : leftAxis;
//Right axis
double rightAxis = stickDriver.getRawAxis(Driver.Y_AXIS_RIGHT);
//Any value less than 0.2 is set to 0.0 to create a dead zone
rightAxis = Math.abs(rightAxis) < 0.2 ? 0.0 : rightAxis;
if(doPID) {
//Set the setpoint as a percentage of the maximum encoder rate
pidLeft.setSetpoint(leftAxis * maxEncoderRate);
pidRight.setSetpoint(-rightAxis * maxEncoderRate);
}
else {
jagLeft.set(leftAxis);
jagRight.set(-rightAxis);
}
}
//If there are 10 seconds left
if(finale) {
minibotToggle.feed(stickOperator.getRawButton(Operator.MINIBOT_RELEASE_ONE) && stickOperator.getRawButton(Operator.MINIBOT_RELEASE_TWO));
minibotRelease.set(minibotToggle.get());
if(minibotToggle.get() && !minibotLimit.get()) //Minibot limit switch is engaged when false
minibotServo.set(255);
}
}
//Returns whether or not the setpoint has been reached
public boolean elevatorPID() {
//Difference between setpoint and our position
final double error = elevatorSetpoint - encElevator.pidGet();
//We can be off by 5%
final double toleranceWhileGoingUp = MAX_ELEVATOR_COUNTS * 0.05;
final double toleranceWhileGoingDown = -MAX_ELEVATOR_COUNTS * 0.05;
//Go up when below setpoint, down when above setpoint
if(error > 0 && error > toleranceWhileGoingUp)
jagElevator.set(ELEVATOR_SPEED_UP);
else if(error < 0 && error < toleranceWhileGoingDown && elevatorLimit.get()) //Cant go down unless elevator limit is disengaged
jagElevator.set(-ELEVATOR_SPEED_DOWN);
else {
jagElevator.set(0.0);
return true;
}
return false;
}
public void setElbow(int state) {
//Update the elbow state
elbowState = state;
//For the elbow pneumatics, closed = true open = false
//The top elbow is only ever closed in the vertical state
elbowTop.set(elbowState != ElbowState.Vertical);
//The bottom elbow is only ever open in the horizontal state
elbowBottom.set(elbowState == ElbowState.Horizontal);
//If we are vertical then close the gripper
if(elbowState == ElbowState.Vertical) {
gripper.set(false);
gripperToggle.set(false);
}
}
//Number of times our setpoint has been reached
int gyroCounter;
public double gyroPID(boolean returnSpeed, double target) {
//Use our own calculations to get to the setpoint of the gyro
final double delta = target - gyro.getAngle();
//For straight driving in autonomous mode
if(returnSpeed) {
if(Math.abs(delta) < GYRO_TOLERANCE)
++gyroCounter;
if(gyroCounter >= 100) {
gyroCounter = 0;
return 0.0;
}
//The speed is incorporated into straight driving so it has to be low
final double speed = 0.1;
return delta > 0 ? -speed : speed;
}
//For turning on the spot
else {
if(Math.abs(delta) < GYRO_TOLERANCE)
++gyroCounter;
if(gyroCounter >= 100)
++stepIndex;
//We are turning on the spot so the turning speed is high
final double speed = 0.85;
jagLeft.set(delta >= 0 ? -speed : speed);
jagRight.set(delta >= 0 ? -speed : speed);
return 0.0;
}
}
public void releaseTube() {
setElbow(ElbowState.Horizontal);
gripper.set(true);
//Toggle the gripper to be open at the beginning of teleop
gripperToggle.set(true);
}
double flashTime = 0;
boolean flash = false;
public void flashLED(boolean finale, boolean flashingColor) {
double now = Timer.getFPGATimestamp();
if(now - flashTime > FLASH_TIME) {
flash = !flash;
flashTime = now;
}
if(finale) {
if(flash) {
lightsOne.set(false);
lightsTwo.set(true);
}
else {
lightsOne.set(true);
lightsTwo.set(false);
}
}
else {
switch(lightState) {
case Lights.Red:
lightsOne.set(true);
if(flashingColor && !flash)
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
break;
case Lights.White:
lightsOne.set(false);
if(flashingColor && !flash)
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
break;
case Lights.Blue:
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.set(true);
if(flashingColor && !flash)
lightsTwo.relay.set(Relay.Value.kOff);
break;
case Lights.Off:
//If the lights are off but we want to flash (distance sensor) default to red light
if(flashingColor) {
lightsOne.set(true);
if(!flash)
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
}
else {
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
}
break;
default:
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.relay.set(Relay.Value.kOff);
break;
}
if(transState) {
lightsOne.relay.set(Relay.Value.kOff);
lightsTwo.set(false);
}
}
}
double lastPrintTime = 0;
//Print function for our variables
public void print(String mode) {
//Current time
final double curPrintTime = Timer.getFPGATimestamp();
//If it has been more than half a second
if(curPrintTime - lastPrintTime > PRINT_DELAY) {
//Make a bunch of newlines to clear the screen to only show the current output
System.out.println("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
//Print statements
System.out.println("[" + mode + "]");
System.out.println("gripperToggle: " + gripperToggle.get());
System.out.println("renc count: " + encRight.encoder.get() + " lenc count: " + encLeft.encoder.get() + " elevator counts: " + encElevator.pidGet());
System.out.println("rencRate: " + encRight.pidGet() + " lencRate: " + encLeft.pidGet());
System.out.println("rSet: " + pidRight.getSetpoint() + " lSet: " + pidLeft.getSetpoint() + " eSet: " + elevatorSetpoint);
System.out.println("rPID: " + pidRight.get() + " lPID: " + pidLeft.get());
System.out.println("manualElevator: " + manualElevatorToggle.get());
System.out.println("elevAxis: " + stickOperator.getAxis(Joystick.AxisType.kY) + " leftAxis: " + stickDriver.getRawAxis(Driver.Y_AXIS_LEFT) + " rightAxis: " + stickDriver.getRawAxis(Driver.Y_AXIS_RIGHT));
System.out.println("Gyro PIDget: " + gyro.pidGet() + " gyro output storage: " + gyroOutput.get());
System.out.println("jagLeft: " + jagLeft.get() + " jagRight: " + jagRight.get());
System.out.println("elbow input: " + stickOperator.getThrottle() + "elbowState: " + elbowState);
//System.out.println("Raven gyro min: " + gyro.min + " max: " + gyro.max + " deadzone: " + gyro.deadzone + " center: " + gyro.center);
System.out.println("rightSensor: " + rightSensor.get() + " middleSensor: " + middleSensor.get() + " leftSensor: " + leftSensor.get());
System.out.println("light: " + lightState);
System.out.println("ultrasonic distance: " + ultrasonicSensor.getVoltage() / ULTRASONIC_VOLTS_PER_INCH);
System.out.println("limitelev: " + elevatorLimit.get() + " minibotlimit: " + minibotLimit.get());
System.out.println("minibotRelease: " + minibotToggle.get());
//Update the last print time
lastPrintTime = curPrintTime;
}
}
}
| waterloo commit
| src/edu/wpi/first/wpilibj/templates/RobotTemplate.java | waterloo commit |
|
Java | mit | 59fa74870d9fd5be3213490b3d378207017cc695 | 0 | Arcfalt/ExtendAHand | package com.arcfalt.extendahand.item;
import com.arcfalt.extendahand.packet.PacketHandler;
import com.arcfalt.extendahand.utils.ItemUtils;
import com.arcfalt.extendahand.utils.RenderUtils;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.MathHelper;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.world.World;
import net.minecraftforge.client.event.RenderWorldLastEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import java.util.HashSet;
import java.util.Set;
public class BasePointExtendo extends BaseExtendo
{
static final String LOC = "extendoLoc";
static final String LOC_NEXT = "extendoLocNext";
@Override
@SideOnly(Side.CLIENT)
public void drawHighlight(RenderWorldLastEvent event, EntityPlayerSP player, ItemStack stack)
{
MovingObjectPosition mouseOver = getMouseOver();
BlockPos blockPos = getTargetBlockPos(player, mouseOver);
Set<BlockPos> actingBlocks = actingBlocks(blockPos, mouseOver.sideHit, player.worldObj, player);
RenderUtils.renderBlockOverlays(event, player, actingBlocks, 1f, .8f, 1f, 0.001f);
float targetOffset = 0.006f;
// Draw existing selected points
NBTTagCompound tags = ItemUtils.getOrCreateTagCompound(stack);
String posTag = LOC + 0;
if(tags.hasKey(posTag))
{
Set<BlockPos> loc0pos = new HashSet<BlockPos>();
loc0pos.add(BlockPos.fromLong(tags.getLong(posTag)));
RenderUtils.renderBlockOverlays(event, player, loc0pos, 1f, .3f, .3f, targetOffset);
}
posTag = LOC + 1;
if(tags.hasKey(posTag))
{
Set<BlockPos> loc1pos = new HashSet<BlockPos>();
loc1pos.add(BlockPos.fromLong(tags.getLong(posTag)));
RenderUtils.renderBlockOverlays(event, player, loc1pos, .3f, .3f, 1f, targetOffset);
}
// Find mouseover object
if(blockPos == null) return;
// Draw mouseover point placement
float r = 1f;
float g = .3f;
float b = 1f;
if(tags.hasKey(LOC_NEXT) && tags.getInteger(LOC_NEXT) != 0) r = .3f;
else b = .3f;
Set<BlockPos> positions = new HashSet<BlockPos>();
positions.add(blockPos);
RenderUtils.renderBlockOverlays(event, player, positions, r, g, b, targetOffset + 0.0001f);
}
@Override
public ItemStack onItemRightClick(ItemStack itemStackIn, World worldIn, EntityPlayer playerIn)
{
if(!worldIn.isRemote) return itemStackIn;
Minecraft minecraft = Minecraft.getMinecraft();
MovingObjectPosition mouseOver = minecraft.getRenderViewEntity().rayTrace(90.0, 1f);
// Make sure the target is a valid block
if(mouseOver == null)
{
sendMessage(EnumChatFormatting.AQUA + "No block targeted!", playerIn);
return itemStackIn;
}
BlockPos blockPos = mouseOver.getBlockPos();
if(blockPos == null)
{
sendMessage(EnumChatFormatting.AQUA + "No block targeted!", playerIn);
return itemStackIn;
}
IBlockState blockState = worldIn.getBlockState(blockPos);
Block block = blockState.getBlock();
NBTTagCompound gotTags = itemStackIn.getTagCompound();
if(playerIn.isSneaking() && gotTags != null && gotTags.hasKey(LOC + 0) && gotTags.hasKey(LOC + 1))
{
IBlockState setState = getResourceState(itemStackIn, blockState);
Block useBlock = setState.getBlock();
int meta = useBlock.getMetaFromState(setState);
Set<BlockPos> positions = actingBlocks(blockPos, mouseOver.sideHit, playerIn.worldObj, playerIn);
PacketHandler.sendExtendoPlacement(useBlock, meta, positions);
return itemStackIn;
}
if(block == null || block.getMaterial() == Material.air)
{
sendMessage(EnumChatFormatting.AQUA + "No block targeted!", playerIn);
return itemStackIn;
}
worldIn.playSoundAtEntity(playerIn, "random.bow", 0.5F, 0.4F / (itemRand.nextFloat() * 0.4F + 0.8F));
NBTTagCompound tags;
if(gotTags == null) tags = new NBTTagCompound();
else tags = (NBTTagCompound) gotTags.copy();
int placeIn = 0;
if(tags.hasKey(LOC_NEXT))
{
placeIn = tags.getInteger(LOC_NEXT);
placeIn = MathHelper.clamp_int(placeIn, 0, 1);
}
tags.setLong(LOC + placeIn, blockPos.toLong());
tags.setInteger(LOC_NEXT, 1 - placeIn);
//tags.
PacketHandler.sendExtendoNBT(itemStackIn, tags);
/*
// Get necessary block data
IBlockState setState = getResourceState(itemStackIn, blockState);
Block useBlock = setState.getBlock();
int meta = useBlock.getMetaFromState(setState);
Set<BlockPos> positions = actingBlocks(blockPos, mouseOver.sideHit, worldIn, playerIn);
// Send placement packet
PacketHandler.sendExtendoPlacement(useBlock, meta, positions);
*/
return itemStackIn;
}
}
| src/main/java/com/arcfalt/extendahand/item/BasePointExtendo.java | package com.arcfalt.extendahand.item;
import com.arcfalt.extendahand.packet.PacketHandler;
import com.arcfalt.extendahand.utils.ItemUtils;
import com.arcfalt.extendahand.utils.RenderUtils;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.MathHelper;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.world.World;
import net.minecraftforge.client.event.RenderWorldLastEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import java.util.HashSet;
import java.util.Set;
public class BasePointExtendo extends BaseExtendo
{
static final String LOC = "extendoLoc";
static final String LOC_NEXT = "extendoLocNext";
@Override
@SideOnly(Side.CLIENT)
public void drawHighlight(RenderWorldLastEvent event, EntityPlayerSP player, ItemStack stack)
{
MovingObjectPosition mouseOver = getMouseOver();
BlockPos blockPos = getTargetBlockPos(player, mouseOver);
Set<BlockPos> actingBlocks = actingBlocks(blockPos, mouseOver.sideHit, player.worldObj, player);
RenderUtils.renderBlockOverlays(event, player, actingBlocks, 1f, .8f, 1f, 0.001f);
float targetOffset = 0.006f;
// Draw existing selected points
NBTTagCompound tags = ItemUtils.getOrCreateTagCompound(stack);
String posTag = LOC + 0;
if(tags.hasKey(posTag))
{
Set<BlockPos> loc0pos = new HashSet<BlockPos>();
loc0pos.add(BlockPos.fromLong(tags.getLong(posTag)));
RenderUtils.renderBlockOverlays(event, player, loc0pos, 1f, .3f, .3f, targetOffset);
}
posTag = LOC + 1;
if(tags.hasKey(posTag))
{
Set<BlockPos> loc1pos = new HashSet<BlockPos>();
loc1pos.add(BlockPos.fromLong(tags.getLong(posTag)));
RenderUtils.renderBlockOverlays(event, player, loc1pos, .3f, .3f, 1f, targetOffset);
}
// Find mouseover object
if(blockPos == null) return;
// Draw mouseover point placement
float r = 1f;
float g = .3f;
float b = 1f;
if(tags.hasKey(LOC_NEXT) && tags.getInteger(LOC_NEXT) != 0) r = .3f;
else b = .3f;
Set<BlockPos> positions = new HashSet<BlockPos>();
positions.add(blockPos);
RenderUtils.renderBlockOverlays(event, player, positions, r, g, b, targetOffset + 0.0001f);
}
@Override
public ItemStack onItemRightClick(ItemStack itemStackIn, World worldIn, EntityPlayer playerIn)
{
if(!worldIn.isRemote) return itemStackIn;
Minecraft minecraft = Minecraft.getMinecraft();
MovingObjectPosition mouseOver = minecraft.getRenderViewEntity().rayTrace(90.0, 1f);
// Make sure the target is a valid block
if(mouseOver == null)
{
sendMessage(EnumChatFormatting.AQUA + "No block targeted!", playerIn);
return itemStackIn;
}
BlockPos blockPos = mouseOver.getBlockPos();
if(blockPos == null)
{
sendMessage(EnumChatFormatting.AQUA + "No block targeted!", playerIn);
return itemStackIn;
}
IBlockState blockState = worldIn.getBlockState(blockPos);
Block block = blockState.getBlock();
if(block == null || block.getMaterial() == Material.air)
{
sendMessage(EnumChatFormatting.AQUA + "No block targeted!", playerIn);
return itemStackIn;
}
worldIn.playSoundAtEntity(playerIn, "random.bow", 0.5F, 0.4F / (itemRand.nextFloat() * 0.4F + 0.8F));
NBTTagCompound gotTags = itemStackIn.getTagCompound();
NBTTagCompound tags;
if(gotTags == null) tags = new NBTTagCompound();
else tags = (NBTTagCompound) gotTags.copy();
int placeIn = 0;
if(tags.hasKey(LOC_NEXT))
{
placeIn = tags.getInteger(LOC_NEXT);
placeIn = MathHelper.clamp_int(placeIn, 0, 1);
}
tags.setLong(LOC + placeIn, blockPos.toLong());
tags.setInteger(LOC_NEXT, 1 - placeIn);
//tags.
PacketHandler.sendExtendoNBT(itemStackIn, tags);
/*
// Get necessary block data
IBlockState setState = getResourceState(itemStackIn, blockState);
Block useBlock = setState.getBlock();
int meta = useBlock.getMetaFromState(setState);
Set<BlockPos> positions = actingBlocks(blockPos, mouseOver.sideHit, worldIn, playerIn);
// Send placement packet
PacketHandler.sendExtendoPlacement(useBlock, meta, positions);
*/
return itemStackIn;
}
}
| Hollow box buildable
| src/main/java/com/arcfalt/extendahand/item/BasePointExtendo.java | Hollow box buildable |
|
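For context on the "Hollow box buildable" change above: the item keeps two corner positions in NBT (LOC + 0 and LOC + 1) and uses LOC_NEXT to decide which slot the next right-click overwrites, flipping it afterwards so the last two clicked blocks are always retained. The sketch below shows just that two-slot ping-pong in plain Java, with packed longs standing in for BlockPos.toLong() values and a tiny array standing in for the NBT tag compound; it is an illustration, not code from the mod.

import java.util.Arrays;

public class TwoPointSelection {
    private final long[] corners = new long[2];
    private final boolean[] set = new boolean[2];
    private int next = 0; // plays the role of the LOC_NEXT tag

    public void select(long packedPos) {
        int placeIn = Math.max(0, Math.min(1, next)); // clamp, like MathHelper.clamp_int(placeIn, 0, 1)
        corners[placeIn] = packedPos;
        set[placeIn] = true;
        next = 1 - placeIn; // flip so the following click replaces the other corner
    }

    public boolean bothCornersSet() {
        return set[0] && set[1];
    }

    public static void main(String[] args) {
        TwoPointSelection sel = new TwoPointSelection();
        sel.select(100L);
        sel.select(200L);
        sel.select(300L); // overwrites the older corner, keeping (300, 200)
        System.out.println(Arrays.toString(sel.corners) + " both set: " + sel.bothCornersSet());
    }
}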
Java | mit | 1cffda729f60f82cd52e1e60443b1b1f00580f73 | 0 | jenkinsci/publish-over-dropbox-plugin,rcgroot/jenkins-publish-over-dropbox-plugin,rcgroot/jenkins-publish-over-dropbox-plugin,jenkinsci/publish-over-dropbox-plugin | package org.jenkinsci.plugins.publishoverdropbox.domain.model.requests;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
public class SessionFinish {
@Expose
final public Cursor cursor = new Cursor();
@Expose
final public Commit commit = new Commit();
public static class Commit {
@Expose
private String path;
@Expose
private String mode = "add";
@Expose
@SerializedName("autorename")
private boolean autoRename = false;
@Expose
private boolean mute = false;
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getMode() {
return mode;
}
public void setMode(String mode) {
this.mode = mode;
}
public boolean isAutoRename() {
return autoRename;
}
public void setAutoRename(boolean autoRename) {
this.autoRename = autoRename;
}
public boolean isMute() {
return mute;
}
public void setMute(boolean mute) {
this.mute = mute;
}
}
}
| src/main/java/org/jenkinsci/plugins/publishoverdropbox/domain/model/requests/SessionFinish.java | package org.jenkinsci.plugins.publishoverdropbox.domain.model.requests;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
public class SessionFinish {
@Expose
final public Cursor cursor = new Cursor();
@Expose
final public Commit commit = new Commit();
public static class Commit {
@Expose
private String path;
@Expose
private String mode = "add";
@Expose
@SerializedName("autoRename")
private boolean autoRename = false;
@Expose
private boolean mute = false;
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getMode() {
return mode;
}
public void setMode(String mode) {
this.mode = mode;
}
public boolean isAutoRename() {
return autoRename;
}
public void setAutoRename(boolean autoRename) {
this.autoRename = autoRename;
}
public boolean isMute() {
return mute;
}
public void setMute(boolean mute) {
this.mute = mute;
}
}
}
| Request field typo, fixes JENKINS-40196
| src/main/java/org/jenkinsci/plugins/publishoverdropbox/domain/model/requests/SessionFinish.java | Request field typo, fixes JENKINS-40196 |
|
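On the JENKINS-40196 fix above: with Gson (the library these @Expose/@SerializedName annotations come from), the value passed to @SerializedName is the literal JSON key written to the request body, so "autoRename" and "autorename" produce different payloads, and the lower-case spelling is what the Dropbox API accepts. A small stand-alone sketch of that behaviour follows; the Commit class here is a trimmed re-declaration for illustration only, not the plugin's own class.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

public class SerializedNameDemo {
    static class Commit {
        @Expose private String path = "/report.txt";
        @Expose private String mode = "add";
        @Expose @SerializedName("autorename") private boolean autoRename = false; // key must match the API's spelling
        @Expose private boolean mute = false;
    }

    public static void main(String[] args) {
        Gson gson = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().create();
        // Prints {"path":"/report.txt","mode":"add","autorename":false,"mute":false}
        System.out.println(gson.toJson(new Commit()));
    }
}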
Java | mit | 49a103ce6272a51c29119f19951e1d3c92d2ba1d | 0 | rnorth/test-containers,testcontainers/testcontainers-java,testcontainers/testcontainers-java,rnorth/test-containers,rnorth/test-containers,testcontainers/testcontainers-java | package org.testcontainers.containers.wait.internal;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.testcontainers.containers.Container.ExecResult;
import org.testcontainers.containers.ExecInContainerPattern;
import org.testcontainers.containers.wait.strategy.WaitStrategyTarget;
import java.time.Duration;
import java.time.Instant;
import java.util.Set;
import static java.lang.String.format;
/**
* Mechanism for testing that a socket is listening when run from the container being checked.
*/
@RequiredArgsConstructor
@Slf4j
public class InternalCommandPortListeningCheck implements java.util.concurrent.Callable<Boolean> {
private final WaitStrategyTarget waitStrategyTarget;
private final Set<Integer> internalPorts;
@Override
public Boolean call() {
StringBuilder command = new StringBuilder("true");
for (int internalPort : internalPorts) {
command.append(" && ");
command.append(" (");
command.append(format("cat /proc/net/tcp* | awk '{print $2}' | grep -i ':0*%x'", internalPort));
command.append(" || ");
command.append(format("nc -vz -w 1 localhost %d", internalPort));
command.append(" || ");
command.append(format("/bin/bash -c '</dev/tcp/localhost/%d'", internalPort));
command.append(")");
}
Instant before = Instant.now();
try {
ExecResult result = ExecInContainerPattern.execInContainer(waitStrategyTarget.getContainerInfo(), "/bin/sh", "-c", command.toString());
log.trace("Check for {} took {}. Result code '{}', stdout message: '{}'", internalPorts, Duration.between(before, Instant.now()), result.getExitCode(), result.getStdout());
int exitCode = result.getExitCode();
if (exitCode != 0 && exitCode != 1) {
log.warn("An exception while executing the internal check: {}", result);
}
return exitCode == 0;
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
| core/src/main/java/org/testcontainers/containers/wait/internal/InternalCommandPortListeningCheck.java | package org.testcontainers.containers.wait.internal;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.testcontainers.containers.Container.ExecResult;
import org.testcontainers.containers.ExecInContainerPattern;
import org.testcontainers.containers.wait.strategy.WaitStrategyTarget;
import java.time.Duration;
import java.time.Instant;
import java.util.Set;
import static java.lang.String.format;
/**
* Mechanism for testing that a socket is listening when run from the container being checked.
*/
@RequiredArgsConstructor
@Slf4j
public class InternalCommandPortListeningCheck implements java.util.concurrent.Callable<Boolean> {
private final WaitStrategyTarget waitStrategyTarget;
private final Set<Integer> internalPorts;
@Override
public Boolean call() {
StringBuilder command = new StringBuilder("true");
for (int internalPort : internalPorts) {
command.append(" && ");
command.append(" (");
command.append(format("cat /proc/net/tcp* | awk '{print $2}' | grep -i ':0*%x'", internalPort));
command.append(" || ");
command.append(format("nc -vz -w 1 localhost %d", internalPort));
command.append(" || ");
command.append(format("/bin/bash -c '</dev/tcp/localhost/%d'", internalPort));
command.append(")");
}
Instant before = Instant.now();
try {
ExecResult result = ExecInContainerPattern.execInContainer(waitStrategyTarget.getContainerInfo(), "/bin/sh", "-c", command.toString());
log.trace("Check for {} took {}. Result code '{}', stdout message: '{}'", internalPorts, Duration.between(before, Instant.now()), result.getExitCode(), result.getStdout());
return result.getExitCode() == 0;
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
| Improve logging in InternalCommandPortListeningCheck (#3001)
| core/src/main/java/org/testcontainers/containers/wait/internal/InternalCommandPortListeningCheck.java | Improve logging in InternalCommandPortListeningCheck (#3001) |
|
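One detail of the shell command assembled above that is easy to miss: /proc/net/tcp lists listening ports in hexadecimal, which is why the first alternative is rendered with %x and matched case-insensitively. The snippet below is a plain-Java illustration of what that fragment looks like for one port; the port number 8080 is just an example value, and there is no Testcontainers dependency involved.

public class PortCheckCommandDemo {
    public static void main(String[] args) {
        int internalPort = 8080; // example value
        // Mirrors the first alternative built in InternalCommandPortListeningCheck:
        // 8080 == 0x1f90, so the grep looks for ":0*1f90" in /proc/net/tcp*.
        String fragment = String.format(
                "cat /proc/net/tcp* | awk '{print $2}' | grep -i ':0*%x'", internalPort);
        System.out.println(fragment);
        // -> cat /proc/net/tcp* | awk '{print $2}' | grep -i ':0*1f90'
    }
}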
Java | mit | 8e2e473434d3af03bcf1655c2dc2a1f644e765d0 | 0 | byronka/xenos,byronka/xenos,byronka/xenos,byronka/xenos,byronka/xenos,byronka/xenos,byronka/xenos | package com.renomad.qarma;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.Statement;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Arrays;
import com.renomad.qarma.File_utilities;
public class Database_access {
private static String CONNECTION_STRING_WITH_DB =
"jdbc:mysql://localhost/test?user=root&password=hictstd!";
private static String CONNECTION_STRING_WITHOUT_DB =
"jdbc:mysql://localhost/?user=root&password=hictstd!";
/**
*Boilerplate code necessary to run the java mysql connector.
*/
public static void register_sql_driver() {
try {
// The newInstance() call is a work around for some
// broken Java implementations
Class.forName("com.mysql.jdbc.Driver").newInstance();
//new com.mysql.jdbc.Driver();
} catch (Exception ex) {
// handle the error
}
}
/**
* Helper to get a Statement, using connection string without db.
* This is used to get a statement before the database is created.
* Opens a connection each time it's run.
* We don't have to worry about SQL injection here, it should only be called by our own code.
* @return A new Statement object.
*/
private static Statement get_a_statement_before_db_exists() throws SQLException {
Connection conn = DriverManager.getConnection(CONNECTION_STRING_WITHOUT_DB);
Statement stmt = conn.createStatement();
return stmt;
}
/**
* Helper to get a PreparedStatement.
*
* Opens a connection each time it's run.
* @return A new PreparedStatement object.
*/
private static PreparedStatement get_a_prepared_statement(String queryText) {
try {
register_sql_driver();
Connection conn = DriverManager.getConnection(CONNECTION_STRING_WITH_DB);
PreparedStatement stmt = conn.prepareStatement(queryText);
return stmt;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return null;
}
/**
*A wrapper for PreparedStatement.executeUpdate(PreparedStatement pstmt)
*
* Opens and closes a connection each time it's run.
* @param pstmt The prepared statement
* @return a ResultSet object that contains the data produced by the query; never null
*/
public static ResultSet execute_query(PreparedStatement pstmt) {
try {
ResultSet result = pstmt.executeQuery();
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return null;
}
/**
*A wrapper for PreparedStatement.executeUpdate(PreparedStatement pstmt)
*
* Opens and closes a connection each time it's run.
* @param pstmt The prepared statement
* @return either (1) the row count for SQL Data Manipulation
* Language (DML) statements or (2) 0 for SQL statements that return nothing
*/
public static int execute_update(PreparedStatement pstmt) {
try {
int result = pstmt.executeUpdate();
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return 0;
}
/**
*A wrapper for PreparedStatement.execute() - used before database exists.
*
* Opens and closes a connection each time it's run.
* @param sqlText the SQL text we will run - it must be a
* single statement. Multiple combined statements will fail.
* @return true if the first result is a ResultSet object; false
* if it is an update count or there are no results
*/
public static boolean run_sql_statement_before_db_exists(String sqlText) {
try (Statement stmt = get_a_statement_before_db_exists()){
boolean result = stmt.execute(sqlText);
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return false;
}
/**
*A wrapper for PreparedStatement.execute(), used for setting up db schemas.
*
* Opens and closes a connection each time it's run.
* @param sqlText the SQL text we will run - it must be a
* single statement. Multiple combined statements will fail.
* @return true if the first result is a ResultSet object; false
* if it is an update count or there are no results
*/
public static boolean run_sql_statement(String sqlText) {
try (PreparedStatement stmt = get_a_prepared_statement(sqlText)){
boolean result = stmt.execute(sqlText);
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return false;
}
public static int add_user(String first_name, String last_name, String email, String password) {
//validation section
if (first_name == null || first_name == "") {
return 0;
}
if (last_name == null || last_name == "") {
return 0;
}
if (email == null || email == "") {
return 0;
}
if (password == null || password == "") {
return 0;
}
String sqlText = "INSERT INTO user (first_name, last_name, email, password) values (?, ?, ?, ?)";
try (PreparedStatement pstmt = get_a_prepared_statement(sqlText)){
pstmt.setString(1, first_name);
pstmt.setString(2, last_name);
pstmt.setString(3, email);
pstmt.setString(4, password);
int result = execute_update(pstmt);
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return 0; //if complete failure, return that we got 0.
}
public static ArrayList<String> get_all_users() {
String sqlText = "SELECT * FROM user";
ArrayList<String> results = new ArrayList<String>();
try (PreparedStatement pstmt = get_a_prepared_statement(sqlText)) {
ResultSet resultSet = execute_query(pstmt);
if (resultSet == null) {
return new ArrayList<String>(Arrays.asList("No users found"));
}
for(;resultSet.next() == true;) {
results.add(resultSet.getNString("user_name"));
}
return results;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return new ArrayList<String>(Arrays.asList("Errors during loading users."));
}
}
| src/com/renomad/qarma/Database_access.java | package com.renomad.qarma;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.Statement;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Arrays;
import com.renomad.qarma.File_utilities;
public class Database_access {
private static String CONNECTION_STRING_WITH_DB =
"jdbc:mysql://localhost/test?user=qarmauser&password=hictstd!";
private static String CONNECTION_STRING_WITHOUT_DB =
"jdbc:mysql://localhost/?user=qarmauser&password=hictstd!";
/**
*Boilerplate code necessary to run the java mysql connector.
*/
public static void register_sql_driver() {
try {
// The newInstance() call is a work around for some
// broken Java implementations
Class.forName("com.mysql.jdbc.Driver").newInstance();
//new com.mysql.jdbc.Driver();
} catch (Exception ex) {
// handle the error
}
}
/**
* Helper to get a Statement, using connection string without db.
* This is used to get a statement before the database is created.
* Opens a connection each time it's run.
* We don't have to worry about SQL injection here, it should only be called by our own code.
* @return A new Statement object.
*/
private static Statement get_a_statement_before_db_exists() throws SQLException {
Connection conn = DriverManager.getConnection(CONNECTION_STRING_WITHOUT_DB);
Statement stmt = conn.createStatement();
return stmt;
}
/**
* Helper to get a PreparedStatement.
*
* Opens a connection each time it's run.
* @return A new PreparedStatement object.
*/
private static PreparedStatement get_a_prepared_statement(String queryText) {
try {
register_sql_driver();
Connection conn = DriverManager.getConnection(CONNECTION_STRING_WITH_DB);
PreparedStatement stmt = conn.prepareStatement(queryText);
return stmt;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return null;
}
/**
*A wrapper for PreparedStatement.executeUpdate(PreparedStatement pstmt)
*
* Opens and closes a connection each time it's run.
* @param pstmt The prepared statement
* @return a ResultSet object that contains the data produced by the query; never null
*/
public static ResultSet execute_query(PreparedStatement pstmt) {
try {
ResultSet result = pstmt.executeQuery();
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return null;
}
/**
*A wrapper for PreparedStatement.executeUpdate(PreparedStatement pstmt)
*
* Opens and closes a connection each time it's run.
* @param pstmt The prepared statement
* @return either (1) the row count for SQL Data Manipulation
* Language (DML) statements or (2) 0 for SQL statements that return nothing
*/
public static int execute_update(PreparedStatement pstmt) {
try {
int result = pstmt.executeUpdate();
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return 0;
}
/**
*A wrapper for PreparedStatement.execute() - used before database exists.
*
* Opens and closes a connection each time it's run.
* @param sqlText the SQL text we will run - it must be a
* single statement. Multiple combined statements will fail.
* @return true if the first result is a ResultSet object; false
* if it is an update count or there are no results
*/
public static boolean run_sql_statement_before_db_exists(String sqlText) {
try (Statement stmt = get_a_statement_before_db_exists()){
boolean result = stmt.execute(sqlText);
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return false;
}
/**
*A wrapper for PreparedStatement.execute(), used for setting up db schemas.
*
* Opens and closes a connection each time it's run.
* @param sqlText the SQL text we will run - it must be a
* single statement. Multiple combined statements will fail.
* @return true if the first result is a ResultSet object; false
* if it is an update count or there are no results
*/
public static boolean run_sql_statement(String sqlText) {
try (PreparedStatement stmt = get_a_prepared_statement(sqlText)){
boolean result = stmt.execute(sqlText);
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return false;
}
public static int add_user(String first_name, String last_name, String email, String password) {
//validation section
if (first_name == null || first_name == "") {
return 0;
}
if (last_name == null || last_name == "") {
return 0;
}
if (email == null || email == "") {
return 0;
}
if (password == null || password == "") {
return 0;
}
String sqlText = "INSERT INTO user (first_name, last_name, email, password) values (?, ?, ?, ?)";
try (PreparedStatement pstmt = get_a_prepared_statement(sqlText)){
pstmt.setString(1, first_name);
pstmt.setString(2, last_name);
pstmt.setString(3, email);
pstmt.setString(4, password);
int result = execute_update(pstmt);
return result;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return 0; //if complete failure, return that we got 0.
}
public static ArrayList<String> get_all_users() {
String sqlText = "SELECT * FROM user";
ArrayList<String> results = new ArrayList<String>();
try (PreparedStatement pstmt = get_a_prepared_statement(sqlText)) {
ResultSet resultSet = execute_query(pstmt);
if (resultSet == null) {
return new ArrayList<String>(Arrays.asList("No users found"));
}
for(;resultSet.next() == true;) {
results.add(resultSet.getNString("user_name"));
}
return results;
} catch (SQLException ex) {
System.out.println("SQLException: " + ex.getMessage());
System.out.println("SQLState: " + ex.getSQLState());
System.out.println("VendorError: " + ex.getErrorCode());
}
return new ArrayList<String>(Arrays.asList("Errors during loading users."));
}
}
| change user to root, why not
| src/com/renomad/qarma/Database_access.java | change user to root, why not |
|
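A note on the JDBC lifecycle in Database_access above: get_a_prepared_statement opens a new Connection for every statement, and the try-with-resources blocks close only the PreparedStatement, so each call leaks a connection. The sketch below shows one conventional way to keep the connection, statement and result set lifetimes tied together; the class name, query and credentials are hypothetical placeholders, not code from the qarma project.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class UserDao {
    private static final String URL = "jdbc:mysql://localhost/test"; // placeholder; real credentials belong in configuration

    // Connection, statement and result set are all closed by try-with-resources,
    // even when an exception is thrown mid-query.
    public List<String> findAllEmails() throws SQLException {
        List<String> emails = new ArrayList<>();
        try (Connection conn = DriverManager.getConnection(URL, "user", "password");
             PreparedStatement pstmt = conn.prepareStatement("SELECT email FROM user");
             ResultSet rs = pstmt.executeQuery()) {
            while (rs.next()) {
                emails.add(rs.getString("email"));
            }
        }
        return emails;
    }
}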
Java | mit | f2109cf99e67f479ff7a18219eaf307f93ea56e7 | 0 | igvteam/igv,igvteam/igv,amwenger/igv,amwenger/igv,amwenger/igv,igvteam/igv,igvteam/igv,itenente/igv,igvteam/igv,godotgildor/igv,godotgildor/igv,godotgildor/igv,itenente/igv,itenente/igv,godotgildor/igv,amwenger/igv,itenente/igv,godotgildor/igv,amwenger/igv,itenente/igv | /*
* Copyright (c) 2007-2013 The Broad Institute, Inc.
* SOFTWARE COPYRIGHT NOTICE
* This software and its documentation are the copyright of the Broad Institute, Inc. All rights are reserved.
*
* This software is supplied without any warranty or guaranteed support whatsoever. The Broad Institute is not responsible for its use, misuse, or functionality.
*
* This software is licensed under the terms of the GNU Lesser General Public License (LGPL),
* Version 2.1 which is available at http://www.opensource.org/licenses/lgpl-2.1.php.
*/
package org.broad.igv.sam.reader;
//~--- non-JDK imports --------------------------------------------------------
//~--- JDK imports ------------------------------------------------------------
import net.sf.samtools.Cigar;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.util.StringUtil;
import org.apache.log4j.Logger;
import org.broad.igv.DirectoryManager;
import org.broad.igv.Globals;
import org.broad.igv.ui.IGV;
import org.broad.igv.util.FileUtils;
import java.io.File;
import java.io.IOException;
/**
* @author jrobinso
*/
public class SamUtils {
private static Logger log = Logger.getLogger(SamUtils.class);
private static final byte ZERO_BYTE = "0".getBytes()[0];
private static final byte NINE_BYTE = "9".getBytes()[0];
public static FeatureIndex getIndexFor(String samPath) {
String idxPath = samPath + ".sai";
if (FileUtils.resourceExists(idxPath)) {
return new FeatureIndex(idxPath);
} else if (FileUtils.isRemote(idxPath)) {
return null;
} else {
File idxFile = new File(idxPath);
File samFile = new File(samPath);
if (!idxFile.exists()) {
idxFile = getUserIdxFile(samFile);
}
if (idxFile.exists() && idxFile.lastModified() > samFile.lastModified()) {
return new FeatureIndex(idxFile);
} else {
return createIndexFor(samFile);
}
}
}
private static FeatureIndex createIndexFor(File samFile) {
File newIdxFile = new File(samFile.getAbsolutePath() + ".sai");
if (!FileUtils.canWriteTo(newIdxFile)) {
newIdxFile = getUserIdxFile(samFile);
}
if (!Globals.isHeadless()) {
SamIndexCreatorDialog dialog = new SamIndexCreatorDialog(IGV.getMainFrame(), true, samFile, newIdxFile);
dialog.setLocationRelativeTo(IGV.getMainFrame());
dialog.setVisible(true);
return dialog.getIndex();
} else {
AlignmentIndexer indexer = AlignmentIndexer.getInstance(samFile, null, null);
FeatureIndex index = null;
try {
log.info("Creating index " + newIdxFile.getAbsolutePath());
index = indexer.createSamIndex(newIdxFile);
} catch (IOException e) {
e.fillInStackTrace();
}
return index;
}
}
private static File getUserIdxFile(File samFile) {
File idxFile;
File samDir = DirectoryManager.getSamDirectory();
//Need the path information to distinguish like name indices in separate
// directories.
idxFile = new File(samDir, samFile.getName() + "_" + samFile.getParent().hashCode() + ".sai");
return idxFile;
}
public static int getPaddedReferenceLength(String cigarString) {
return decodeCigar(cigarString).getPaddedReferenceLength();
}
/**
* Convert from String CIGAR representation to Cigar class representation. Does not
* do validation beyond the most basic CIGAR string well-formedness, i.e. each operator is
* valid, and preceded by a decimal length.
*
* @param textCigar CIGAR in String form ala SAM text file. "*" means empty CIGAR.
* @throws RuntimeException if textCigar is invalid at the most basic level.
*/
static Cigar decodeCigar(final String textCigar) {
if (SAMRecord.NO_ALIGNMENT_CIGAR.equals(textCigar)) {
return new Cigar();
}
final Cigar ret = new Cigar();
final byte[] cigarBytes = StringUtil.stringToBytes(textCigar);
for (int i = 0; i < cigarBytes.length; ++i) {
if (!isDigit(cigarBytes[i])) {
throw new IllegalArgumentException("Malformed CIGAR string: " + textCigar);
}
int length = (cigarBytes[i] - ZERO_BYTE);
for (++i; isDigit(cigarBytes[i]); ++i) {
length = (length * 10) + cigarBytes[i] - ZERO_BYTE;
}
final CigarOperator operator = CigarOperator.characterToEnum(cigarBytes[i]);
ret.add(new CigarElement(length, operator));
}
return ret;
}
private static boolean isDigit(final byte c) {
return c >= ZERO_BYTE && c <= NINE_BYTE;
}
}
| src/org/broad/igv/sam/reader/SamUtils.java | /*
* Copyright (c) 2007-2011 by The Broad Institute of MIT and Harvard. All Rights Reserved.
*
* This software is licensed under the terms of the GNU Lesser General Public License (LGPL),
* Version 2.1 which is available at http://www.opensource.org/licenses/lgpl-2.1.php.
*
* THE SOFTWARE IS PROVIDED "AS IS." THE BROAD AND MIT MAKE NO REPRESENTATIONS OR
* WARRANTES OF ANY KIND CONCERNING THE SOFTWARE, EXPRESS OR IMPLIED, INCLUDING,
* WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER
* OR NOT DISCOVERABLE. IN NO EVENT SHALL THE BROAD OR MIT, OR THEIR RESPECTIVE
* TRUSTEES, DIRECTORS, OFFICERS, EMPLOYEES, AND AFFILIATES BE LIABLE FOR ANY DAMAGES
* OF ANY KIND, INCLUDING, WITHOUT LIMITATION, INCIDENTAL OR CONSEQUENTIAL DAMAGES,
* ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER
* THE BROAD OR MIT SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT
* SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*/
package org.broad.igv.sam.reader;
//~--- non-JDK imports --------------------------------------------------------
//~--- JDK imports ------------------------------------------------------------
import net.sf.samtools.Cigar;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.util.StringUtil;
import org.broad.igv.DirectoryManager;
import org.broad.igv.Globals;
import org.broad.igv.ui.IGV;
import org.broad.igv.util.FileUtils;
import java.io.File;
import java.io.IOException;
/**
* @author jrobinso
*/
public class SamUtils {
private static final byte ZERO_BYTE = "0".getBytes()[0];
private static final byte NINE_BYTE = "9".getBytes()[0];
public static FeatureIndex getIndexFor(String samPath) {
String idxPath = samPath + ".sai";
if (FileUtils.resourceExists(idxPath)) {
return new FeatureIndex(idxPath);
} else if (FileUtils.isRemote(idxPath)) {
return null;
} else {
File idxFile = new File(idxPath);
File samFile = new File(samPath);
if (!idxFile.exists()) {
idxFile = getUserIdxFile(samFile);
}
if (idxFile.exists() && idxFile.lastModified() > samFile.lastModified()) {
return new FeatureIndex(idxFile);
} else {
return createIndexFor(samFile);
}
}
}
private static FeatureIndex createIndexFor(File samFile) {
File newIdxFile = new File(samFile.getAbsolutePath() + ".sai");
if (!FileUtils.canWriteTo(newIdxFile)) {
newIdxFile = getUserIdxFile(samFile);
}
if (!Globals.isHeadless()) {
SamIndexCreatorDialog dialog = new SamIndexCreatorDialog(IGV.getMainFrame(), true, samFile, newIdxFile);
dialog.setLocationRelativeTo(IGV.getMainFrame());
dialog.setVisible(true);
return dialog.getIndex();
} else {
AlignmentIndexer indexer = AlignmentIndexer.getInstance(samFile, null, null);
FeatureIndex index = null;
try {
System.out.println("Creating index");
index = indexer.createSamIndex(newIdxFile);
} catch (IOException e) {
e.fillInStackTrace();
}
return index;
}
}
private static File getUserIdxFile(File samFile) {
File idxFile;
File samDir = DirectoryManager.getSamDirectory();
//Need the path information to distinguish like name indices in separate
// directories.
idxFile = new File(samDir, samFile.getName() + "_" + samFile.getParent().hashCode() + ".sai");
return idxFile;
}
public static int getPaddedReferenceLength(String cigarString) {
return decodeCigar(cigarString).getPaddedReferenceLength();
}
/**
* Convert from String CIGAR representation to Cigar class representation. Does not
* do validation beyond the most basic CIGAR string well-formedness, i.e. each operator is
* valid, and preceded by a decimal length.
*
* @param textCigar CIGAR in String form ala SAM text file. "*" means empty CIGAR.
* @throws RuntimeException if textCigar is invalid at the most basic level.
*/
static Cigar decodeCigar(final String textCigar) {
if (SAMRecord.NO_ALIGNMENT_CIGAR.equals(textCigar)) {
return new Cigar();
}
final Cigar ret = new Cigar();
final byte[] cigarBytes = StringUtil.stringToBytes(textCigar);
for (int i = 0; i < cigarBytes.length; ++i) {
if (!isDigit(cigarBytes[i])) {
throw new IllegalArgumentException("Malformed CIGAR string: " + textCigar);
}
int length = (cigarBytes[i] - ZERO_BYTE);
for (++i; isDigit(cigarBytes[i]); ++i) {
length = (length * 10) + cigarBytes[i] - ZERO_BYTE;
}
final CigarOperator operator = CigarOperator.characterToEnum(cigarBytes[i]);
ret.add(new CigarElement(length, operator));
}
return ret;
}
private static boolean isDigit(final byte c) {
return c >= ZERO_BYTE && c <= NINE_BYTE;
}
}
| Use log instead of System.out
| src/org/broad/igv/sam/reader/SamUtils.java | Use log instead of System.out |
|
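To make the decodeCigar contract above concrete: a CIGAR string is a run-length encoding of alignment operations, so "10M2I4D5M" decodes to four (length, operator) elements, and only reference-consuming operators (M and D here, not I) advance the position on the reference. The following sketch reproduces the same digit-accumulation loop without the samtools classes; it is an illustration of the parse, not IGV code.

import java.util.ArrayList;
import java.util.List;

public class CigarParseDemo {
    // Parses a CIGAR string into (length, operator) pairs using the same
    // digit-accumulation approach as SamUtils.decodeCigar, minus the samtools types.
    static List<int[]> parse(String cigar) {
        List<int[]> elements = new ArrayList<>();
        int i = 0;
        while (i < cigar.length()) {
            int length = 0;
            while (i < cigar.length() && Character.isDigit(cigar.charAt(i))) {
                length = length * 10 + (cigar.charAt(i) - '0');
                i++;
            }
            char op = cigar.charAt(i++);
            elements.add(new int[]{length, op});
        }
        return elements;
    }

    public static void main(String[] args) {
        for (int[] e : parse("10M2I4D5M")) {
            System.out.println(e[0] + " x " + (char) e[1]);
        }
        // 10 x M, 2 x I, 4 x D, 5 x M -> reference span is 10 + 4 + 5 = 19, since the insertion does not consume reference bases.
    }
}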
Java | mit | 08584c1f2973a6def044e912570f225feb7471ed | 0 | johanbrook/watchme | package se.chalmers.watchmetest.activity;
import se.chalmers.watchme.R;
import se.chalmers.watchme.activity.MainActivity;
import se.chalmers.watchme.activity.TabsAdapter;
import se.chalmers.watchme.ui.MovieListFragment;
import se.chalmers.watchme.ui.TagListFragment;
import android.annotation.TargetApi;
import android.app.ActionBar;
import android.app.ActionBar.Tab;
import android.support.v4.app.Fragment;
import android.support.v4.view.ViewPager;
import android.test.ActivityInstrumentationTestCase2;
import android.test.UiThreadTest;
import com.jayway.android.robotium.solo.Solo;
public class MainActivityTest extends
ActivityInstrumentationTestCase2<MainActivity> {
private Solo solo;
public MainActivityTest() {
super(MainActivity.class);
}
@Override
protected void setUp() throws Exception {
super.setUp();
solo = new Solo(getInstrumentation(), getActivity());
}
//TODO: API fix
//similarity to test in TabsAdapter
//refactor?
@TargetApi(11)
@UiThreadTest
public void testSwitchTabs() {
MainActivity mainActivity = this.getActivity();
Tab tab1 = mainActivity.getActionBar().getTabAt(0);
Tab tab2 = mainActivity.getActionBar().getTabAt(1);
tab2.select();
ViewPager viewPager = (ViewPager) mainActivity.findViewById(R.id.vPager);
int itemViewedIndex = viewPager.getCurrentItem();
TabsAdapter tabsAdapter = (TabsAdapter) viewPager.getAdapter();
Fragment currentlyViewedFragment = (Fragment) tabsAdapter.instantiateItem(viewPager, itemViewedIndex);
System.out.println("--------------------" + currentlyViewedFragment.getTag());
assertTrue(currentlyViewedFragment.getClass() == TagListFragment.class);
tab1.select();
itemViewedIndex = viewPager.getCurrentItem();
currentlyViewedFragment = (Fragment) tabsAdapter.instantiateItem(viewPager, itemViewedIndex);
System.out.println("--------------------" + currentlyViewedFragment.getTag());
assertTrue(currentlyViewedFragment.getClass() == MovieListFragment.class);
}
//TODO: Name? testOnCreate()?
// Testing to basic stuff?
//API fix
@TargetApi(11)
public void testState() {
MainActivity mainActivity = this.getActivity();
ActionBar actionBar = mainActivity.getActionBar();
assertTrue(actionBar.getNavigationMode() == ActionBar.NAVIGATION_MODE_TABS);
}
public void testActivity() {
MainActivity mainActivity = this.getActivity();
ViewPager viewPager = (ViewPager) mainActivity.findViewById(R.id.vPager);
int viewPagerId = viewPager.getId();
solo.assertCurrentActivity("Check on first activity", MainActivity.class);
assertTrue(solo.waitForFragmentByTag("android:switcher:" + viewPagerId + ":1"));
solo.clickOnText("Tags");
assertTrue(solo.waitForFragmentByTag("android:switcher:" + viewPagerId + ":0"));
solo.clickOnText("Movies");
}
}
| WatchMeTest/src/se/chalmers/watchmetest/activity/MainActivityTest.java | package se.chalmers.watchmetest.activity;
import se.chalmers.watchme.R;
import se.chalmers.watchme.activity.MainActivity;
import se.chalmers.watchme.activity.TabsAdapter;
import se.chalmers.watchme.ui.MovieListFragment;
import se.chalmers.watchme.ui.TagListFragment;
import android.annotation.TargetApi;
import android.app.ActionBar;
import android.app.ActionBar.Tab;
import android.support.v4.app.Fragment;
import android.support.v4.view.ViewPager;
import android.test.ActivityInstrumentationTestCase2;
import android.test.UiThreadTest;
public class MainActivityTest extends
ActivityInstrumentationTestCase2<MainActivity> {
public MainActivityTest() {
super(MainActivity.class);
}
@Override
protected void setUp() throws Exception {
super.setUp();
}
//TODO: API fix
//similarity to test in TabsAdapter
//refactor?
@TargetApi(11)
@UiThreadTest
public void testSwitchTabs() {
MainActivity mainActivity = this.getActivity();
Tab tab1 = mainActivity.getActionBar().getTabAt(0);
Tab tab2 = mainActivity.getActionBar().getTabAt(1);
tab2.select();
ViewPager viewPager = (ViewPager) mainActivity.findViewById(R.id.vPager);
int itemViewedIndex = viewPager.getCurrentItem();
TabsAdapter tabsAdapter = (TabsAdapter) viewPager.getAdapter();
Fragment currentlyViewedFragment = (Fragment) tabsAdapter.instantiateItem(viewPager, itemViewedIndex);
assertTrue(currentlyViewedFragment.getClass() == TagListFragment.class);
tab1.select();
itemViewedIndex = viewPager.getCurrentItem();
currentlyViewedFragment = (Fragment) tabsAdapter.instantiateItem(viewPager, itemViewedIndex);
assertTrue(currentlyViewedFragment.getClass() == MovieListFragment.class);
}
//TODO: Name? testOnCreate()?
// Testing to basic stuff?
//API fix
@TargetApi(11)
public void testState() {
MainActivity mainActivity = this.getActivity();
ActionBar actionBar = mainActivity.getActionBar();
assertTrue(actionBar.getNavigationMode() == ActionBar.NAVIGATION_MODE_TABS);
}
}
| Replaced method testSwitchTabs() in MainActivityTest
| WatchMeTest/src/se/chalmers/watchmetest/activity/MainActivityTest.java | Replaced method testSwitchTabs() in MainActivityTest |
|
Java | mit | 0173c7e78f39a99314fbb5e7a580e53dfa40d9b0 | 0 | jenkinsci/extended-choice-parameter-plugin,Shyri/extended-choice-parameter-plugin,jkorzekwa/extended-choice-parameter-plugin-artifactory,jenkinsci/extended-choice-parameter-plugin,jhosmer/extended-choice-parameter-plugin,jkorzekwa/extended-choice-parameter-plugin-artifactory,jenkinsci/extended-choice-parameter-plugin,Shyri/extended-choice-parameter-plugin,Shyri/extended-choice-parameter-plugin,jhosmer/extended-choice-parameter-plugin | package com.cwctravel.hudson.plugins.extended_choice_parameter;
import hudson.Extension;
import hudson.model.ParameterValue;
import hudson.model.ParameterDefinition;
import hudson.util.FormValidation;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import javax.servlet.ServletException;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.Property;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
public class ExtendedChoiceParameterDefinition extends ParameterDefinition {
private static final long serialVersionUID = -2946187268529865645L;
public static final String PARAMETER_TYPE_SINGLE_SELECT = "PT_SINGLE_SELECT";
public static final String PARAMETER_TYPE_MULTI_SELECT = "PT_MULTI_SELECT";
@Extension
public static class DescriptorImpl extends ParameterDescriptor {
@Override
public String getDisplayName() {
return Messages.ExtendedChoiceParameterDefinition_DisplayName();
}
public FormValidation doCheckPropertyFile(@QueryParameter final String propertyFile, @QueryParameter final String propertyKey) throws IOException, ServletException {
if(StringUtils.isBlank(propertyFile)) {
return FormValidation.ok();
}
File prop = new File(propertyFile);
if(!prop.exists()) {
return FormValidation.error(Messages.ExtendedChoiceParameterDefinition_PropertyFileDoesntExist(), propertyFile);
}
Properties p = new Properties();
p.load(new FileInputStream(prop));
if(StringUtils.isNotBlank(propertyKey)) {
if(p.containsKey(propertyKey)) {
return FormValidation.ok();
}
else {
return FormValidation.error(Messages.ExtendedChoiceParameterDefinition_PropertyFileExistsButProvidedKeyIsInvalid(), propertyFile, propertyKey);
}
}
else {
return FormValidation.warning(Messages.ExtendedChoiceParameterDefinition_PropertyFileExistsButNoProvidedKey(), propertyFile);
}
}
public FormValidation doCheckPropertyKey(@QueryParameter final String propertyFile, @QueryParameter final String propertyKey) throws IOException, ServletException {
return doCheckPropertyFile(propertyFile, propertyKey);
}
public FormValidation doCheckDefaultPropertyFile(@QueryParameter final String defaultPropertyFile,
@QueryParameter final String defaultPropertyKey) throws IOException, ServletException {
return doCheckPropertyFile(defaultPropertyFile, defaultPropertyKey);
}
public FormValidation doCheckDefaultPropertyKey(@QueryParameter final String defaultPropertyFile,
@QueryParameter final String defaultPropertyKey) throws IOException, ServletException {
return doCheckPropertyFile(defaultPropertyFile, defaultPropertyKey);
}
}
private boolean quoteValue;
private String type;
private String value;
private String propertyFile;
private String propertyKey;
private String defaultValue;
private String defaultPropertyFile;
private String defaultPropertyKey;
@DataBoundConstructor
public ExtendedChoiceParameterDefinition(String name, String type, String value, String propertyFile, String propertyKey, String defaultValue,
String defaultPropertyFile, String defaultPropertyKey, boolean quoteValue, String description) {
super(name, description);
this.type = type;
this.propertyFile = propertyFile;
this.propertyKey = propertyKey;
this.defaultPropertyFile = defaultPropertyFile;
this.defaultPropertyKey = defaultPropertyKey;
this.value = value;
this.defaultValue = defaultValue;
this.quoteValue = quoteValue;
}
private Map<String, Boolean> computeDefaultValueMap() {
Map<String, Boolean> defaultValueMap = null;
String effectiveDefaultValue = getEffectiveDefaultValue();
if(!StringUtils.isBlank(effectiveDefaultValue)) {
defaultValueMap = new HashMap<String, Boolean>();
String[] defaultValues = StringUtils.split(effectiveDefaultValue, ',');
for(String value: defaultValues) {
defaultValueMap.put(StringUtils.trim(value), true);
}
}
return defaultValueMap;
}
@Override
public ParameterValue createValue(StaplerRequest request) {
String value[] = request.getParameterValues(getName());
if(value == null) {
return getDefaultParameterValue();
}
return null;
}
@Override
public ParameterValue createValue(StaplerRequest request, JSONObject jO) {
Object value = jO.get("value");
String strValue = "";
if(value instanceof String) {
strValue = (String)value;
}
else if(value instanceof JSONArray) {
JSONArray jsonValues = (JSONArray)value;
strValue = StringUtils.join(jsonValues.iterator(), ',');
}
if(quoteValue) {
strValue = "\"" + strValue + "\"";
}
return new ExtendedChoiceParameterValue(getName(), strValue);
}
@Override
public ParameterValue getDefaultParameterValue() {
String defaultValue = getEffectiveDefaultValue();
if(!StringUtils.isBlank(defaultValue)) {
if(quoteValue) {
defaultValue = "\"" + defaultValue + "\"";
}
return new ExtendedChoiceParameterValue(getName(), defaultValue);
}
return super.getDefaultParameterValue();
}
private String computeValue(String value, String propertyFilePath, String propertyKey) {
if(!StringUtils.isBlank(propertyFile) && !StringUtils.isBlank(propertyKey)) {
try {
File propertyFile = new File(propertyFilePath);
Project project = new Project();
Property property = new Property();
property.setProject(project);
property.setFile(propertyFile);
property.execute();
return project.getProperty(propertyKey);
}
catch(Exception e) {
}
}
else if(!StringUtils.isBlank(value)) {
return value;
}
return null;
}
@Override
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getEffectiveDefaultValue() {
return computeValue(defaultValue, defaultPropertyFile, defaultPropertyKey);
}
public String getDefaultValue() {
return defaultValue;
}
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
public String getPropertyFile() {
return propertyFile;
}
public void setPropertyFile(String propertyFile) {
this.propertyFile = propertyFile;
}
public String getDefaultPropertyKey() {
return defaultPropertyKey;
}
public void setDefaultPropertyKey(String defaultPropertyKey) {
this.defaultPropertyKey = defaultPropertyKey;
}
public String getEffectiveValue() {
return computeValue(value, propertyFile, propertyKey);
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public String getPropertyKey() {
return propertyKey;
}
public void setPropertyKey(String propertyKey) {
this.propertyKey = propertyKey;
}
public String getDefaultPropertyFile() {
return defaultPropertyFile;
}
public boolean isQuoteValue() {
return quoteValue;
}
public void setQuoteValue(boolean quoteValue) {
this.quoteValue = quoteValue;
}
public void setDefaultPropertyFile(String defaultPropertyFile) {
this.defaultPropertyFile = defaultPropertyFile;
}
public Map<String, Boolean> getDefaultValueMap() {
return computeDefaultValueMap();
}
}
| src/main/java/com/cwctravel/hudson/plugins/extended_choice_parameter/ExtendedChoiceParameterDefinition.java | package com.cwctravel.hudson.plugins.extended_choice_parameter;
import hudson.Extension;
import hudson.model.ParameterValue;
import hudson.model.ParameterDefinition;
import hudson.util.FormValidation;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import javax.servlet.ServletException;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.Property;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
public class ExtendedChoiceParameterDefinition extends ParameterDefinition {
private static final long serialVersionUID = -2946187268529865645L;
public static final String PARAMETER_TYPE_SINGLE_SELECT = "PT_SINGLE_SELECT";
public static final String PARAMETER_TYPE_MULTI_SELECT = "PT_MULTI_SELECT";
@Extension
public static class DescriptorImpl extends ParameterDescriptor {
@Override
public String getDisplayName() {
return Messages.ExtendedChoiceParameterDefinition_DisplayName();
}
public FormValidation doCheckPropertyFile(@QueryParameter final String propertyFile, @QueryParameter final String propertyKey) throws IOException, ServletException {
if(StringUtils.isBlank(propertyFile)) {
return FormValidation.ok();
}
File prop = new File(propertyFile);
if(!prop.exists()) {
return FormValidation.error(Messages.ExtendedChoiceParameterDefinition_PropertyFileDoesntExist(), propertyFile);
}
Properties p = new Properties();
p.load(new FileInputStream(prop));
if(StringUtils.isNotBlank(propertyKey)) {
if(p.containsKey(propertyKey)) {
return FormValidation.ok();
}
else {
return FormValidation.error(Messages.ExtendedChoiceParameterDefinition_PropertyFileExistsButProvidedKeyIsInvalid(), propertyFile, propertyKey);
}
}
else {
return FormValidation.warning(Messages.ExtendedChoiceParameterDefinition_PropertyFileExistsButNoProvidedKey(), propertyFile);
}
}
public FormValidation doCheckPropertyKey(@QueryParameter final String propertyFile, @QueryParameter final String propertyKey) throws IOException, ServletException {
return doCheckPropertyFile(propertyFile, propertyKey);
}
public FormValidation doCheckDefaultPropertyFile(@QueryParameter final String defaultPropertyFile,
@QueryParameter final String defaultPropertyKey) throws IOException, ServletException {
return doCheckPropertyFile(defaultPropertyFile, defaultPropertyKey);
}
public FormValidation doCheckDefaultPropertyKey(@QueryParameter final String defaultPropertyFile,
@QueryParameter final String defaultPropertyKey) throws IOException, ServletException {
return doCheckPropertyFile(defaultPropertyFile, defaultPropertyKey);
}
}
private boolean quoteValue;
private String type;
private String value;
private String propertyFile;
private String propertyKey;
private String defaultValue;
private String defaultPropertyFile;
private String defaultPropertyKey;
private transient Map<File, Long> fileTimestampMap = new HashMap<File, Long>();
private transient Map<File, Project> fileProjectMap = new HashMap<File, Project>();
@DataBoundConstructor
public ExtendedChoiceParameterDefinition(String name, String type, String value, String propertyFile, String propertyKey, String defaultValue,
String defaultPropertyFile, String defaultPropertyKey, boolean quoteValue, String description) {
super(name, description);
this.type = type;
this.propertyFile = propertyFile;
this.propertyKey = propertyKey;
this.defaultPropertyFile = defaultPropertyFile;
this.defaultPropertyKey = defaultPropertyKey;
this.value = value;
this.defaultValue = defaultValue;
this.quoteValue = quoteValue;
}
private Map<String, Boolean> computeDefaultValueMap() {
Map<String, Boolean> defaultValueMap = null;
String effectiveDefaultValue = getEffectiveDefaultValue();
if(!StringUtils.isBlank(effectiveDefaultValue)) {
defaultValueMap = new HashMap<String, Boolean>();
String[] defaultValues = StringUtils.split(effectiveDefaultValue, ',');
for(String value: defaultValues) {
defaultValueMap.put(StringUtils.trim(value), true);
}
}
return defaultValueMap;
}
@Override
public ParameterValue createValue(StaplerRequest request) {
String value[] = request.getParameterValues(getName());
if(value == null) {
return getDefaultParameterValue();
}
return null;
}
@Override
public ParameterValue createValue(StaplerRequest request, JSONObject jO) {
Object value = jO.get("value");
String strValue = "";
if(value instanceof String) {
strValue = (String)value;
}
else if(value instanceof JSONArray) {
JSONArray jsonValues = (JSONArray)value;
strValue = StringUtils.join(jsonValues.iterator(), ',');
}
if(quoteValue) {
strValue = "\"" + strValue + "\"";
}
return new ExtendedChoiceParameterValue(getName(), strValue);
}
@Override
public ParameterValue getDefaultParameterValue() {
String defaultValue = getEffectiveDefaultValue();
if(!StringUtils.isBlank(defaultValue)) {
if(quoteValue) {
defaultValue = "\"" + defaultValue + "\"";
}
return new ExtendedChoiceParameterValue(getName(), defaultValue);
}
return super.getDefaultParameterValue();
}
private String computeValue(String value, String propertyFilePath, String propertyKey) {
if(fileTimestampMap == null) {
fileTimestampMap = new HashMap<File, Long>();
}
if(fileProjectMap == null) {
fileProjectMap = new HashMap<File, Project>();
}
if(!StringUtils.isBlank(propertyFile) && !StringUtils.isBlank(propertyKey)) {
try {
File propertyFile = new File(propertyFilePath);
Project project = fileProjectMap.get(propertyFile);
Long lastTimestamp = fileTimestampMap.get(propertyFile);
long currentTimestamp = propertyFile.lastModified();
if(project == null || lastTimestamp == null || currentTimestamp != lastTimestamp) {
project = new Project();
Property property = new Property();
property.setProject(project);
property.setFile(propertyFile);
property.execute();
fileProjectMap.put(propertyFile, project);
fileTimestampMap.put(propertyFile, currentTimestamp);
}
return project.getProperty(propertyKey);
}
catch(Exception e) {
}
}
else if(!StringUtils.isBlank(value)) {
return value;
}
return null;
}
@Override
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getEffectiveDefaultValue() {
return computeValue(defaultValue, defaultPropertyFile, defaultPropertyKey);
}
public String getDefaultValue() {
return defaultValue;
}
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
public String getPropertyFile() {
return propertyFile;
}
public void setPropertyFile(String propertyFile) {
this.propertyFile = propertyFile;
}
public String getDefaultPropertyKey() {
return defaultPropertyKey;
}
public void setDefaultPropertyKey(String defaultPropertyKey) {
this.defaultPropertyKey = defaultPropertyKey;
}
public String getEffectiveValue() {
return computeValue(value, propertyFile, propertyKey);
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public String getPropertyKey() {
return propertyKey;
}
public void setPropertyKey(String propertyKey) {
this.propertyKey = propertyKey;
}
public String getDefaultPropertyFile() {
return defaultPropertyFile;
}
public boolean isQuoteValue() {
return quoteValue;
}
public void setQuoteValue(boolean quoteValue) {
this.quoteValue = quoteValue;
}
public void setDefaultPropertyFile(String defaultPropertyFile) {
this.defaultPropertyFile = defaultPropertyFile;
}
public Map<String, Boolean> getDefaultValueMap() {
return computeDefaultValueMap();
}
}
| changes to resolve properties like ant
| src/main/java/com/cwctravel/hudson/plugins/extended_choice_parameter/ExtendedChoiceParameterDefinition.java | changes to resolve properties like ant |
|
Java | mit | 5a036501c81917c72ebd0c16ac0a294abcb6d4dd | 0 | InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service | package com.worth.ifs.application.transactional;
import static com.worth.ifs.commons.error.CommonErrors.notFoundError;
import static com.worth.ifs.commons.service.ServiceResult.serviceSuccess;
import static com.worth.ifs.util.EntityLookupCallbacks.find;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import com.worth.ifs.application.constant.ApplicationStatusConstants;
import com.worth.ifs.application.domain.Application;
import com.worth.ifs.application.mapper.ApplicationSummaryMapper;
import com.worth.ifs.application.mapper.ApplicationSummaryPageMapper;
import com.worth.ifs.application.mapper.ClosedCompetitionApplicationSummaryMapper;
import com.worth.ifs.application.mapper.ClosedCompetitionApplicationSummaryPageMapper;
import com.worth.ifs.application.resource.ApplicationSummaryPageResource;
import com.worth.ifs.application.resource.ApplicationSummaryResource;
import com.worth.ifs.application.resource.ClosedCompetitionApplicationSummaryPageResource;
import com.worth.ifs.application.resource.ClosedCompetitionApplicationSummaryResource;
import com.worth.ifs.application.resource.CompetitionSummaryResource;
import com.worth.ifs.application.resource.CompletedPercentageResource;
import com.worth.ifs.application.resource.PageResource;
import com.worth.ifs.application.resource.comparators.ApplicationSummaryResourceLeadComparator;
import com.worth.ifs.application.resource.comparators.ApplicationSummaryResourcePercentageCompleteComparator;
import com.worth.ifs.application.resource.comparators.ClosedCompetitionApplicationSummaryGrantRequestedComparator;
import com.worth.ifs.application.resource.comparators.ClosedCompetitionApplicationSummaryLeadComparator;
import com.worth.ifs.application.resource.comparators.ClosedCompetitionApplicationSummaryNumberOfPartnersComparator;
import com.worth.ifs.application.resource.comparators.ClosedCompetitionApplicationSummaryTotalProjectCostComparator;
import com.worth.ifs.commons.service.ServiceResult;
import com.worth.ifs.competition.domain.Competition;
import com.worth.ifs.transactional.BaseTransactionalService;
@Service
public class ApplicationSummaryServiceImpl extends BaseTransactionalService implements ApplicationSummaryService {
private static final int PAGE_SIZE = 20;
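    // Application statuses that are treated as "submitted" when filtering and counting applications.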
private static final Collection<Long> SUBMITTED_STATUS_IDS = Arrays.asList(
ApplicationStatusConstants.APPROVED.getId(),
ApplicationStatusConstants.REJECTED.getId(),
ApplicationStatusConstants.SUBMITTED.getId());
@Autowired
private ApplicationSummaryMapper applicationSummaryMapper;
@Autowired
private ApplicationSummaryPageMapper applicationSummaryPageMapper;
@Autowired
private ApplicationService applicationService;
@Autowired
private ClosedCompetitionApplicationSummaryMapper closedCompetitionApplicationSummaryMapper;
@Autowired
private ClosedCompetitionApplicationSummaryPageMapper closedCompetitionApplicationSummaryPageMapper;
@Override
public ServiceResult<ApplicationSummaryPageResource> getApplicationSummariesByCompetitionId(Long competitionId, int pageIndex, String sortBy) {
String[] sortField = getApplicationSummarySortField(sortBy);
Pageable pageable = new PageRequest(pageIndex, PAGE_SIZE, new Sort(Direction.ASC, sortField));
if(canUseSpringDataPaginationForSummaryResults(sortBy)){
Page<Application> applicationResults = applicationRepository.findByCompetitionId(competitionId, pageable);
return find(applicationResults, notFoundError(Page.class)).andOnSuccessReturn(applicationSummaryPageMapper::mapToResource);
}
List<Application> resultsList = applicationRepository.findByCompetitionId(competitionId);
ApplicationSummaryPageResource result = new ApplicationSummaryPageResource();
result.setContent(sortAndRestrictSummaryResults(resultsList, pageable, sortBy));
return pageFromUnsortedApplicationResults(result, resultsList, pageable, sortBy, ApplicationSummaryPageResource.class);
}
@Override
public ServiceResult<CompetitionSummaryResource> getCompetitionSummaryByCompetitionId(Long competitionId){
Competition competition = competitionRepository.findById(competitionId);
CompetitionSummaryResource competitionSummaryResource = new CompetitionSummaryResource();
competitionSummaryResource.setCompetitionId(competitionId);
competitionSummaryResource.setCompetitionName(competition.getName());
competitionSummaryResource.setCompetitionStatus(competition.getCompetitionStatus());
competitionSummaryResource.setTotalNumberOfApplications(applicationRepository.countByCompetitionId(competitionId));
competitionSummaryResource.setApplicationsStarted(applicationRepository.countByCompetitionIdAndApplicationStatusId(competitionId, ApplicationStatusConstants.OPEN.getId()));
competitionSummaryResource.setApplicationsInProgress(getApplicationInProgressCountByCompetitionId(competitionId));
competitionSummaryResource.setApplicationsSubmitted(applicationRepository.countByCompetitionIdAndApplicationStatusIdIn(competitionId, SUBMITTED_STATUS_IDS));
competitionSummaryResource.setApplicationsNotSubmitted(competitionSummaryResource.getTotalNumberOfApplications() - competitionSummaryResource.getApplicationsSubmitted());
competitionSummaryResource.setApplicationDeadline(competition.getEndDate());
return serviceSuccess(competitionSummaryResource);
}
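    // An application counts as "in progress" when it has not reached a submitted status and is more than 50% complete.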
private Long getApplicationInProgressCountByCompetitionId(Long competitionId) {
Long inProgressCount = 0L;
final List<Application> applications = applicationRepository.findByCompetitionIdAndApplicationStatusIdNotIn(competitionId, SUBMITTED_STATUS_IDS);
for(Application application : applications){
final CompletedPercentageResource completedPercentageResource = applicationService.getProgressPercentageByApplicationId(application.getId()).getSuccessObject();
if(completedPercentageResource.getCompletedPercentage().intValue() > 50) {
inProgressCount++;
}
}
return inProgressCount;
}
@Override
public ServiceResult<ClosedCompetitionApplicationSummaryPageResource> getSubmittedApplicationSummariesForClosedCompetitionByCompetitionId(
Long competitionId, int pageIndex, String sortBy) {
return getClosedCompetitionApplicationSummariesByCompetitionId(competitionId, pageIndex, sortBy, true);
}
@Override
public ServiceResult<ClosedCompetitionApplicationSummaryPageResource> getNotSubmittedApplicationSummariesForClosedCompetitionByCompetitionId(
Long competitionId, int pageIndex, String sortBy) {
return getClosedCompetitionApplicationSummariesByCompetitionId(competitionId, pageIndex, sortBy, false);
}
private ServiceResult<ClosedCompetitionApplicationSummaryPageResource> getClosedCompetitionApplicationSummariesByCompetitionId(
Long competitionId, int pageIndex, String sortBy, boolean submitted) {
String[] sortField = getClosedCompetitionApplicationSummarySortField(sortBy);
Pageable pageable = new PageRequest(pageIndex, PAGE_SIZE, new Sort(Direction.ASC, sortField));
if(canUseSpringDataPaginationForClosedCompetitionResults(sortBy)){
Page<Application> applicationResults;
if(submitted) {
applicationResults = applicationRepository.findByCompetitionIdAndApplicationStatusIdIn(competitionId, SUBMITTED_STATUS_IDS, pageable);
} else {
applicationResults = applicationRepository.findByCompetitionIdAndApplicationStatusIdNotIn(competitionId, SUBMITTED_STATUS_IDS, pageable);
}
return find(applicationResults, notFoundError(Page.class)).andOnSuccessReturn(closedCompetitionApplicationSummaryPageMapper::mapToResource);
}
List<Application> resultsList;
if(submitted) {
resultsList = applicationRepository.findByCompetitionIdAndApplicationStatusIdIn(competitionId, SUBMITTED_STATUS_IDS);
} else {
resultsList = applicationRepository.findByCompetitionIdAndApplicationStatusIdNotIn(competitionId, SUBMITTED_STATUS_IDS);
}
ClosedCompetitionApplicationSummaryPageResource result = new ClosedCompetitionApplicationSummaryPageResource();
result.setContent(closedCompetitionSortAndRestrictResults(resultsList, pageable, sortBy));
return pageFromUnsortedApplicationResults(result, resultsList, pageable, sortBy, ClosedCompetitionApplicationSummaryPageResource.class);
}
@Override
public List<Application> getApplicationSummariesByCompetitionIdAndStatus(Long competitionId, Long applicationStatusId) {
List<Application> applicationResults = applicationRepository.findByCompetitionIdAndApplicationStatusId(competitionId, applicationStatusId);
return applicationResults;
}
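    // Fills in the paging metadata by hand for result lists that were sorted and restricted in memory rather than by Spring Data.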
private <U, T extends PageResource<U>> ServiceResult<T> pageFromUnsortedApplicationResults(T result, List<Application> resultsList, Pageable pageable, String sortBy, Class clazz) {
result.setNumber(pageable.getPageNumber());
result.setSize(pageable.getPageSize());
result.setTotalElements(resultsList.size());
result.setTotalPages((resultsList.size() / pageable.getPageSize()) + 1);
return find(result, notFoundError(clazz));
}
private List<ApplicationSummaryResource> sortAndRestrictSummaryResults(List<Application> resultsList, Pageable pageable, String sortBy) {
return resultsList.stream()
.map(applicationSummaryMapper::mapToResource)
.sorted((i1, i2) -> {
if("id".equals(sortBy)) {
return 0;
} else if("lead".equals(sortBy)) {
return new ApplicationSummaryResourceLeadComparator().compare(i1, i2);
}
return new ApplicationSummaryResourcePercentageCompleteComparator().compare(i1, i2);
})
.skip(pageable.getOffset())
.limit(pageable.getPageSize())
.collect(Collectors.toList());
}
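    // Only sorts that map directly to entity fields (id, name, status) can be delegated to Spring Data paging; other sorts are applied in memory.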
private boolean canUseSpringDataPaginationForSummaryResults(String sortBy) {
return "id".equals(sortBy) || "name".equals(sortBy) || "status".equals(sortBy);
}
private String[] getApplicationSummarySortField(String sortBy) {
if(StringUtils.isEmpty(sortBy)){
return new String[]{"id"};
}
switch (sortBy) {
case "id":
return new String[]{"id"};
case "name":
return new String[]{"name", "id"};
case "status":
return new String[]{"applicationStatus.name", "id"};
default:
return new String[]{"id"};
}
}
private String[] getClosedCompetitionApplicationSummarySortField(String sortBy) {
if(StringUtils.isEmpty(sortBy)){
return new String[]{"id"};
}
switch (sortBy) {
case "id":
return new String[]{"id"};
case "name":
return new String[]{"name", "id"};
case "duration":
return new String[]{"durationInMonths", "id"};
default:
return new String[]{"id"};
}
}
private boolean canUseSpringDataPaginationForClosedCompetitionResults(String sortBy) {
return !("numberOfPartners".equals(sortBy) || "lead".equals(sortBy) | "grantRequested".equals(sortBy) || "totalProjectCost".equals(sortBy));
}
private List<ClosedCompetitionApplicationSummaryResource> closedCompetitionSortAndRestrictResults(List<Application> resultsList, Pageable pageable, String sortBy) {
return resultsList.stream()
.map(closedCompetitionApplicationSummaryMapper::mapToResource)
.sorted((i1, i2) -> {
if("numberOfPartners".equals(sortBy)) {
return new ClosedCompetitionApplicationSummaryNumberOfPartnersComparator().compare(i1, i2);
} else if("lead".equals(sortBy)) {
return new ClosedCompetitionApplicationSummaryLeadComparator().compare(i1, i2);
} else if("grantRequested".equals(sortBy)) {
return new ClosedCompetitionApplicationSummaryGrantRequestedComparator().compare(i1, i2);
} else if("totalProjectCost".equals(sortBy)) {
return new ClosedCompetitionApplicationSummaryTotalProjectCostComparator().compare(i1, i2);
}
return 0;
})
.skip(pageable.getOffset())
.limit(pageable.getPageSize())
.collect(Collectors.toList());
}
}
| ifs-data-service/src/main/java/com/worth/ifs/application/transactional/ApplicationSummaryServiceImpl.java | package com.worth.ifs.application.transactional;
import static com.worth.ifs.commons.error.CommonErrors.notFoundError;
import static com.worth.ifs.commons.service.ServiceResult.serviceSuccess;
import static com.worth.ifs.util.EntityLookupCallbacks.find;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import com.worth.ifs.application.constant.ApplicationStatusConstants;
import com.worth.ifs.application.domain.Application;
import com.worth.ifs.application.mapper.ApplicationSummaryMapper;
import com.worth.ifs.application.mapper.ApplicationSummaryPageMapper;
import com.worth.ifs.application.mapper.ClosedCompetitionApplicationSummaryMapper;
import com.worth.ifs.application.mapper.ClosedCompetitionApplicationSummaryPageMapper;
import com.worth.ifs.application.resource.ApplicationSummaryPageResource;
import com.worth.ifs.application.resource.ApplicationSummaryResource;
import com.worth.ifs.application.resource.ClosedCompetitionApplicationSummaryPageResource;
import com.worth.ifs.application.resource.ClosedCompetitionApplicationSummaryResource;
import com.worth.ifs.application.resource.CompetitionSummaryResource;
import com.worth.ifs.application.resource.CompletedPercentageResource;
import com.worth.ifs.application.resource.PageResource;
import com.worth.ifs.application.resource.comparators.ApplicationSummaryResourceLeadComparator;
import com.worth.ifs.application.resource.comparators.ApplicationSummaryResourcePercentageCompleteComparator;
import com.worth.ifs.application.resource.comparators.ClosedCompetitionApplicationSummaryGrantRequestedComparator;
import com.worth.ifs.application.resource.comparators.ClosedCompetitionApplicationSummaryLeadComparator;
import com.worth.ifs.application.resource.comparators.ClosedCompetitionApplicationSummaryNumberOfPartnersComparator;
import com.worth.ifs.application.resource.comparators.ClosedCompetitionApplicationSummaryTotalProjectCostComparator;
import com.worth.ifs.commons.service.ServiceResult;
import com.worth.ifs.competition.domain.Competition;
import com.worth.ifs.transactional.BaseTransactionalService;
@Service
public class ApplicationSummaryServiceImpl extends BaseTransactionalService implements ApplicationSummaryService {
private static final int PAGE_SIZE = 20;
private static final Collection<Long> SUBMITTED_STATUS_IDS = Arrays.asList(
ApplicationStatusConstants.APPROVED.getId(),
ApplicationStatusConstants.REJECTED.getId(),
ApplicationStatusConstants.SUBMITTED.getId());
@Autowired
private ApplicationSummaryMapper applicationSummaryMapper;
@Autowired
private ApplicationSummaryPageMapper applicationSummaryPageMapper;
@Autowired
private ApplicationService applicationService;
@Autowired
private ClosedCompetitionApplicationSummaryMapper closedCompetitionApplicationSummaryMapper;
@Autowired
private ClosedCompetitionApplicationSummaryPageMapper closedCompetitionApplicationSummaryPageMapper;
@Override
public ServiceResult<ApplicationSummaryPageResource> getApplicationSummariesByCompetitionId(Long competitionId, int pageIndex, String sortBy) {
String[] sortField = getApplicationSummarySortField(sortBy);
Pageable pageable = new PageRequest(pageIndex, PAGE_SIZE, new Sort(Direction.ASC, sortField));
if(canUseSpringDataPaginationForSummaryResults(sortBy)){
Page<Application> applicationResults = applicationRepository.findByCompetitionId(competitionId, pageable);
return find(applicationResults, notFoundError(Page.class)).andOnSuccessReturn(applicationSummaryPageMapper::mapToResource);
}
List<Application> resultsList = applicationRepository.findByCompetitionId(competitionId);
ApplicationSummaryPageResource result = new ApplicationSummaryPageResource();
result.setContent(sortAndRestrictSummaryResults(resultsList, pageable, sortBy));
return pageFromUnsortedApplicationResults(result, resultsList, pageable, sortBy, ApplicationSummaryPageResource.class);
}
@Override
public ServiceResult<CompetitionSummaryResource> getCompetitionSummaryByCompetitionId(Long competitionId){
Competition competition = competitionRepository.findById(competitionId);
CompetitionSummaryResource competitionSummaryResource = new CompetitionSummaryResource();
competitionSummaryResource.setCompetitionId(competitionId);
competitionSummaryResource.setCompetitionName(competition.getName());
competitionSummaryResource.setCompetitionStatus(competition.getCompetitionStatus());
competitionSummaryResource.setTotalNumberOfApplications(applicationRepository.countByCompetitionId(competitionId));
competitionSummaryResource.setApplicationsStarted(applicationRepository.countByCompetitionIdAndApplicationStatusId(competitionId, ApplicationStatusConstants.OPEN.getId()));
competitionSummaryResource.setApplicationsInProgress(getApplicationInProgressCountByCompetitionId(competitionId));
competitionSummaryResource.setApplicationsSubmitted(applicationRepository.countByCompetitionIdAndApplicationStatusIdIn(competitionId, SUBMITTED_STATUS_IDS));
competitionSummaryResource.setApplicationsNotSubmitted(competitionSummaryResource.getTotalNumberOfApplications() - competitionSummaryResource.getApplicationsSubmitted());
competitionSummaryResource.setApplicationDeadline(competition.getEndDate());
return serviceSuccess(competitionSummaryResource);
}
private Long getApplicationInProgressCountByCompetitionId(Long competitionId) {
final List<Application> applications = applicationRepository.findByCompetitionId(competitionId);
        Long inProgressCount = 0L;
for(Application application : applications){
final CompletedPercentageResource completedPercentageResource = applicationService.getProgressPercentageByApplicationId(application.getId()).getSuccessObject();
if(completedPercentageResource.getCompletedPercentage().intValue() > 50 && !(application.getApplicationStatus().equals(ApplicationStatusConstants.SUBMITTED))){
inProgressCount++;
}
}
return inProgressCount;
}
@Override
public ServiceResult<ClosedCompetitionApplicationSummaryPageResource> getSubmittedApplicationSummariesForClosedCompetitionByCompetitionId(
Long competitionId, int pageIndex, String sortBy) {
return getClosedCompetitionApplicationSummariesByCompetitionId(competitionId, pageIndex, sortBy, true);
}
@Override
public ServiceResult<ClosedCompetitionApplicationSummaryPageResource> getNotSubmittedApplicationSummariesForClosedCompetitionByCompetitionId(
Long competitionId, int pageIndex, String sortBy) {
return getClosedCompetitionApplicationSummariesByCompetitionId(competitionId, pageIndex, sortBy, false);
}
private ServiceResult<ClosedCompetitionApplicationSummaryPageResource> getClosedCompetitionApplicationSummariesByCompetitionId(
Long competitionId, int pageIndex, String sortBy, boolean submitted) {
String[] sortField = getClosedCompetitionApplicationSummarySortField(sortBy);
Pageable pageable = new PageRequest(pageIndex, PAGE_SIZE, new Sort(Direction.ASC, sortField));
if(canUseSpringDataPaginationForClosedCompetitionResults(sortBy)){
Page<Application> applicationResults;
if(submitted) {
applicationResults = applicationRepository.findByCompetitionIdAndApplicationStatusIdIn(competitionId, SUBMITTED_STATUS_IDS, pageable);
} else {
applicationResults = applicationRepository.findByCompetitionIdAndApplicationStatusIdNotIn(competitionId, SUBMITTED_STATUS_IDS, pageable);
}
return find(applicationResults, notFoundError(Page.class)).andOnSuccessReturn(closedCompetitionApplicationSummaryPageMapper::mapToResource);
}
List<Application> resultsList;
if(submitted) {
resultsList = applicationRepository.findByCompetitionIdAndApplicationStatusIdIn(competitionId, SUBMITTED_STATUS_IDS);
} else {
resultsList = applicationRepository.findByCompetitionIdAndApplicationStatusIdNotIn(competitionId, SUBMITTED_STATUS_IDS);
}
ClosedCompetitionApplicationSummaryPageResource result = new ClosedCompetitionApplicationSummaryPageResource();
result.setContent(closedCompetitionSortAndRestrictResults(resultsList, pageable, sortBy));
return pageFromUnsortedApplicationResults(result, resultsList, pageable, sortBy, ClosedCompetitionApplicationSummaryPageResource.class);
}
@Override
public List<Application> getApplicationSummariesByCompetitionIdAndStatus(Long competitionId, Long applicationStatusId) {
List<Application> applicationResults = applicationRepository.findByCompetitionIdAndApplicationStatusId(competitionId, applicationStatusId);
return applicationResults;
}
private <U, T extends PageResource<U>> ServiceResult<T> pageFromUnsortedApplicationResults(T result, List<Application> resultsList, Pageable pageable, String sortBy, Class clazz) {
result.setNumber(pageable.getPageNumber());
result.setSize(pageable.getPageSize());
result.setTotalElements(resultsList.size());
result.setTotalPages((resultsList.size() / pageable.getPageSize()) + 1);
return find(result, notFoundError(clazz));
}
private List<ApplicationSummaryResource> sortAndRestrictSummaryResults(List<Application> resultsList, Pageable pageable, String sortBy) {
return resultsList.stream()
.map(applicationSummaryMapper::mapToResource)
.sorted((i1, i2) -> {
if("id".equals(sortBy)) {
return 0;
} else if("lead".equals(sortBy)) {
return new ApplicationSummaryResourceLeadComparator().compare(i1, i2);
}
return new ApplicationSummaryResourcePercentageCompleteComparator().compare(i1, i2);
})
.skip(pageable.getOffset())
.limit(pageable.getPageSize())
.collect(Collectors.toList());
}
private boolean canUseSpringDataPaginationForSummaryResults(String sortBy) {
return "id".equals(sortBy) || "name".equals(sortBy) || "status".equals(sortBy);
}
private String[] getApplicationSummarySortField(String sortBy) {
if(StringUtils.isEmpty(sortBy)){
return new String[]{"id"};
}
switch (sortBy) {
case "id":
return new String[]{"id"};
case "name":
return new String[]{"name", "id"};
case "status":
return new String[]{"applicationStatus.name", "id"};
default:
return new String[]{"id"};
}
}
private String[] getClosedCompetitionApplicationSummarySortField(String sortBy) {
if(StringUtils.isEmpty(sortBy)){
return new String[]{"id"};
}
switch (sortBy) {
case "id":
return new String[]{"id"};
case "name":
return new String[]{"name", "id"};
case "duration":
return new String[]{"durationInMonths", "id"};
default:
return new String[]{"id"};
}
}
private boolean canUseSpringDataPaginationForClosedCompetitionResults(String sortBy) {
return !("numberOfPartners".equals(sortBy) || "lead".equals(sortBy) | "grantRequested".equals(sortBy) || "totalProjectCost".equals(sortBy));
}
private List<ClosedCompetitionApplicationSummaryResource> closedCompetitionSortAndRestrictResults(List<Application> resultsList, Pageable pageable, String sortBy) {
return resultsList.stream()
.map(closedCompetitionApplicationSummaryMapper::mapToResource)
.sorted((i1, i2) -> {
if("numberOfPartners".equals(sortBy)) {
return new ClosedCompetitionApplicationSummaryNumberOfPartnersComparator().compare(i1, i2);
} else if("lead".equals(sortBy)) {
return new ClosedCompetitionApplicationSummaryLeadComparator().compare(i1, i2);
} else if("grantRequested".equals(sortBy)) {
return new ClosedCompetitionApplicationSummaryGrantRequestedComparator().compare(i1, i2);
} else if("totalProjectCost".equals(sortBy)) {
return new ClosedCompetitionApplicationSummaryTotalProjectCostComparator().compare(i1, i2);
}
return 0;
})
.skip(pageable.getOffset())
.limit(pageable.getPageSize())
.collect(Collectors.toList());
}
}
| Ensuring submitted applications are not counted for inprogress. INFUND-2377
| ifs-data-service/src/main/java/com/worth/ifs/application/transactional/ApplicationSummaryServiceImpl.java | Ensuring submitted applications are not counted for inprogress. INFUND-2377 |
|
Java | epl-1.0 | 356b30572b46bb7fcb735ce8fb4dcae28b3cba5f | 0 | tavalin/openhab2-addons,jarlebh/openhab2-addons,afuechsel/openhab2,gerrieg/openhab2,Mr-Eskildsen/openhab2-addons,aogorek/openhab2-addons,Snickermicker/openhab2,aogorek/openhab2-addons,lewie/openhab2,gerrieg/openhab2,tavalin/openhab2-addons,theoweiss/openhab2,pail23/openhab2-addons,jarlebh/openhab2-addons,Mr-Eskildsen/openhab2-addons,digitaldan/openhab2,theoweiss/openhab2,aogorek/openhab2-addons,jarlebh/openhab2-addons,pail23/openhab2-addons,trokohl/openhab2-addons,jarlebh/openhab2-addons,pail23/openhab2-addons,aogorek/openhab2-addons,jarlebh/openhab2-addons,Mr-Eskildsen/openhab2-addons,pgfeller/openhab2-addons,tavalin/openhab2-addons,trokohl/openhab2-addons,trokohl/openhab2-addons,tavalin/openhab2-addons,aogorek/openhab2-addons,theoweiss/openhab2,pail23/openhab2-addons,pgfeller/openhab2-addons,Mr-Eskildsen/openhab2-addons,Snickermicker/openhab2,afuechsel/openhab2,clinique/openhab2,digitaldan/openhab2,trokohl/openhab2-addons,afuechsel/openhab2,Snickermicker/openhab2,lewie/openhab2,jarlebh/openhab2-addons,trokohl/openhab2-addons,pail23/openhab2-addons,pgfeller/openhab2-addons,clinique/openhab2,gerrieg/openhab2,digitaldan/openhab2,Mr-Eskildsen/openhab2-addons,tavalin/openhab2-addons,lewie/openhab2,Snickermicker/openhab2,pgfeller/openhab2-addons,gerrieg/openhab2,theoweiss/openhab2,clinique/openhab2,digitaldan/openhab2,pgfeller/openhab2-addons,trokohl/openhab2-addons,clinique/openhab2,tavalin/openhab2-addons,aogorek/openhab2-addons | /**
* Copyright (c) 2010-2018 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.tesla.handler;
import static org.openhab.binding.tesla.TeslaBindingConstants.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.security.GeneralSecurityException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.xml.bind.DatatypeConverter;
import org.apache.commons.lang.StringUtils;
import org.eclipse.smarthome.core.library.types.DecimalType;
import org.eclipse.smarthome.core.library.types.IncreaseDecreaseType;
import org.eclipse.smarthome.core.library.types.OnOffType;
import org.eclipse.smarthome.core.library.types.PercentType;
import org.eclipse.smarthome.core.library.types.StringType;
import org.eclipse.smarthome.core.storage.Storage;
import org.eclipse.smarthome.core.storage.StorageService;
import org.eclipse.smarthome.core.thing.ChannelUID;
import org.eclipse.smarthome.core.thing.Thing;
import org.eclipse.smarthome.core.thing.ThingStatus;
import org.eclipse.smarthome.core.thing.ThingStatusDetail;
import org.eclipse.smarthome.core.thing.binding.BaseThingHandler;
import org.eclipse.smarthome.core.types.Command;
import org.eclipse.smarthome.core.types.RefreshType;
import org.eclipse.smarthome.core.types.State;
import org.eclipse.smarthome.core.types.UnDefType;
import org.glassfish.jersey.client.ClientProperties;
import org.openhab.binding.tesla.TeslaBindingConstants;
import org.openhab.binding.tesla.TeslaBindingConstants.EventKeys;
import org.openhab.binding.tesla.internal.TeslaChannelSelectorProxy;
import org.openhab.binding.tesla.internal.TeslaChannelSelectorProxy.TeslaChannelSelector;
import org.openhab.binding.tesla.internal.protocol.ChargeState;
import org.openhab.binding.tesla.internal.protocol.ClimateState;
import org.openhab.binding.tesla.internal.protocol.DriveState;
import org.openhab.binding.tesla.internal.protocol.GUIState;
import org.openhab.binding.tesla.internal.protocol.TokenRequest;
import org.openhab.binding.tesla.internal.protocol.TokenRequestPassword;
import org.openhab.binding.tesla.internal.protocol.TokenRequestRefreshToken;
import org.openhab.binding.tesla.internal.protocol.TokenResponse;
import org.openhab.binding.tesla.internal.protocol.Vehicle;
import org.openhab.binding.tesla.internal.protocol.VehicleState;
import org.openhab.binding.tesla.internal.throttler.QueueChannelThrottler;
import org.openhab.binding.tesla.internal.throttler.Rate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
/**
* The {@link TeslaHandler} is responsible for handling commands, which are sent
* to one of the channels.
*
* @author Karel Goderis - Initial contribution
* @author Nicolai Grødum - Adding token based auth
*/
public class TeslaHandler extends BaseThingHandler {
private static final int EVENT_STREAM_CONNECT_TIMEOUT = 3000;
private static final int EVENT_STREAM_READ_TIMEOUT = 200000;
private static final int EVENT_TIMESTAMP_AGE_LIMIT = 3000;
private static final int EVENT_TIMESTAMP_MAX_DELTA = 10000;
private static final int FAST_STATUS_REFRESH_INTERVAL = 15000;
private static final int SLOW_STATUS_REFRESH_INTERVAL = 60000;
private static final int CONNECT_RETRY_INTERVAL = 15000;
private static final int API_MAXIMUM_ERRORS_IN_INTERVAL = 2;
private static final int API_ERROR_INTERVAL_SECONDS = 15;
private static final int EVENT_MAXIMUM_ERRORS_IN_INTERVAL = 10;
private static final int EVENT_ERROR_INTERVAL_SECONDS = 15;
private final Logger logger = LoggerFactory.getLogger(TeslaHandler.class);
// Vehicle state variables
protected Vehicle vehicle;
protected String vehicleJSON;
protected DriveState driveState;
protected GUIState guiState;
protected VehicleState vehicleState;
protected ChargeState chargeState;
protected ClimateState climateState;
// REST Client API variables
protected final Client teslaClient = ClientBuilder.newClient();
protected Client eventClient = ClientBuilder.newClient();
public final WebTarget teslaTarget = teslaClient.target(TESLA_OWNERS_URI);
public final WebTarget tokenTarget = teslaTarget.path(TESLA_ACCESS_TOKEN_URI);
public final WebTarget vehiclesTarget = teslaTarget.path(API_VERSION).path(VEHICLES);
public final WebTarget vehicleTarget = vehiclesTarget.path(VEHICLE_ID_PATH);
public final WebTarget dataRequestTarget = vehicleTarget.path(DATA_REQUEST_PATH);
public final WebTarget commandTarget = vehicleTarget.path(COMMAND_PATH);
protected WebTarget eventTarget;
// Threading and Job related variables
protected ScheduledFuture<?> connectJob;
protected Thread eventThread;
protected ScheduledFuture<?> fastStateJob;
protected ScheduledFuture<?> slowStateJob;
protected QueueChannelThrottler stateThrottler;
protected boolean allowWakeUp = true;
protected long lastTimeStamp;
protected long apiIntervalTimestamp;
protected int apiIntervalErrors;
protected long eventIntervalTimestamp;
protected int eventIntervalErrors;
protected ReentrantLock lock;
private StorageService storageService;
protected Gson gson = new Gson();
protected TeslaChannelSelectorProxy teslaChannelSelectorProxy = new TeslaChannelSelectorProxy();
private TokenResponse logonToken;
public TeslaHandler(Thing thing, StorageService storageService) {
super(thing);
this.storageService = storageService;
}
@Override
public void initialize() {
logger.trace("Initializing the Tesla handler for {}", this.getStorageKey());
updateStatus(ThingStatus.UNKNOWN);
lock = new ReentrantLock();
lock.lock();
try {
if (connectJob == null || connectJob.isCancelled()) {
connectJob = scheduler.scheduleWithFixedDelay(connectRunnable, 0, CONNECT_RETRY_INTERVAL,
TimeUnit.MILLISECONDS);
}
eventThread = new Thread(eventRunnable, "ESH-Tesla-Event Stream-" + getThing().getUID());
eventThread.start();
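            // Throttle traffic towards the Tesla API: data requests at most once per second and commands at most
            // 20 per minute, with overall caps of 20 calls per minute and 200 calls per 10 minutes.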
Map<Object, Rate> channels = new HashMap<>();
channels.put(TESLA_DATA_THROTTLE, new Rate(1, 1, TimeUnit.SECONDS));
channels.put(TESLA_COMMAND_THROTTLE, new Rate(20, 1, TimeUnit.MINUTES));
Rate firstRate = new Rate(20, 1, TimeUnit.MINUTES);
Rate secondRate = new Rate(200, 10, TimeUnit.MINUTES);
stateThrottler = new QueueChannelThrottler(firstRate, scheduler, channels);
stateThrottler.addRate(secondRate);
if (fastStateJob == null || fastStateJob.isCancelled()) {
fastStateJob = scheduler.scheduleWithFixedDelay(fastStateRunnable, 0, FAST_STATUS_REFRESH_INTERVAL,
TimeUnit.MILLISECONDS);
}
if (slowStateJob == null || slowStateJob.isCancelled()) {
slowStateJob = scheduler.scheduleWithFixedDelay(slowStateRunnable, 0, SLOW_STATUS_REFRESH_INTERVAL,
TimeUnit.MILLISECONDS);
}
} finally {
lock.unlock();
}
}
@Override
public void dispose() {
logger.trace("Disposing the Tesla handler for {}", getThing().getUID());
lock.lock();
try {
if (fastStateJob != null && !fastStateJob.isCancelled()) {
fastStateJob.cancel(true);
fastStateJob = null;
}
if (slowStateJob != null && !slowStateJob.isCancelled()) {
slowStateJob.cancel(true);
slowStateJob = null;
}
            if (eventThread != null && !eventThread.isInterrupted()) {
eventThread.interrupt();
eventThread = null;
}
if (connectJob != null && !connectJob.isCancelled()) {
connectJob.cancel(true);
connectJob = null;
}
} finally {
lock.unlock();
}
}
@Override
public void handleCommand(ChannelUID channelUID, Command command) {
String channelID = channelUID.getId();
TeslaChannelSelector selector = TeslaChannelSelector.getValueSelectorFromChannelID(channelID);
if (command instanceof RefreshType) {
if (isAwake()) {
// Request the state of all known variables. This is sub-optimal, but the requests get scheduled and
// throttled so we are safe not to break the Tesla SLA
requestData(TESLA_DRIVE_STATE);
requestData(TESLA_VEHICLE_STATE);
requestData(TESLA_CHARGE_STATE);
requestData(TESLA_CLIMATE_STATE);
requestData(TESLA_GUI_STATE);
}
} else {
if (selector != null) {
try {
switch (selector) {
case CHARGE_LIMIT_SOC: {
if (command instanceof PercentType) {
setChargeLimit(((PercentType) command).intValue());
} else if (command instanceof OnOffType && command == OnOffType.ON) {
setChargeLimit(100);
} else if (command instanceof OnOffType && command == OnOffType.OFF) {
setChargeLimit(0);
} else if (command instanceof IncreaseDecreaseType
&& command == IncreaseDecreaseType.INCREASE) {
setChargeLimit(Math.min(chargeState.charge_limit_soc + 1, 100));
} else if (command instanceof IncreaseDecreaseType
&& command == IncreaseDecreaseType.DECREASE) {
setChargeLimit(Math.max(chargeState.charge_limit_soc - 1, 0));
}
break;
}
case TEMPERATURE: {
if (command instanceof DecimalType) {
if (getThing().getProperties().containsKey("temperatureunits")
&& getThing().getProperties().get("temperatureunits").equals("F")) {
float fTemp = ((DecimalType) command).floatValue();
float cTemp = ((fTemp - 32.0f) * 5.0f / 9.0f);
setTemperature(cTemp);
} else {
setTemperature(((DecimalType) command).floatValue());
}
}
break;
}
case SUN_ROOF_STATE: {
if (command instanceof StringType) {
setSunroof(command.toString());
}
break;
}
case SUN_ROOF: {
if (command instanceof PercentType) {
moveSunroof(((PercentType) command).intValue());
} else if (command instanceof OnOffType && command == OnOffType.ON) {
moveSunroof(100);
} else if (command instanceof OnOffType && command == OnOffType.OFF) {
moveSunroof(0);
} else if (command instanceof IncreaseDecreaseType
&& command == IncreaseDecreaseType.INCREASE) {
moveSunroof(Math.min(chargeState.charge_limit_soc + 1, 100));
} else if (command instanceof IncreaseDecreaseType
&& command == IncreaseDecreaseType.DECREASE) {
moveSunroof(Math.max(chargeState.charge_limit_soc - 1, 0));
}
break;
}
case CHARGE_TO_MAX: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
setMaxRangeCharging(true);
} else {
setMaxRangeCharging(false);
}
}
break;
}
case CHARGE: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
charge(true);
} else {
charge(false);
}
}
break;
}
case FLASH: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
flashLights();
}
}
break;
}
case HONK_HORN: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
honkHorn();
}
}
break;
}
case CHARGEPORT: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
openChargePort();
}
}
break;
}
case DOOR_LOCK: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
lockDoors(true);
} else {
lockDoors(false);
}
}
break;
}
case AUTO_COND: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
autoConditioning(true);
} else {
autoConditioning(false);
}
}
break;
}
case WAKEUP: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
wakeUp();
}
}
break;
}
case ALLOWWAKEUP: {
if (command instanceof OnOffType) {
allowWakeUp = (((OnOffType) command) == OnOffType.ON);
}
break;
}
case ENABLEEVENTS: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
if (eventThread == null) {
eventThread = new Thread(eventRunnable,
"ESH-Tesla-Event Stream-" + getThing().getUID());
eventThread.start();
}
} else {
if (eventThread != null) {
eventThread.interrupt();
eventThread = null;
}
}
}
break;
}
default:
break;
}
return;
} catch (IllegalArgumentException e) {
logger.warn(
"An error occurred while trying to set the read-only variable associated with channel '{}' to '{}'",
channelID, command.toString());
}
}
}
}
public void sendCommand(String command, String payLoad, WebTarget target) {
Request request = new Request(command, payLoad, target);
if (stateThrottler != null) {
stateThrottler.submit(TESLA_COMMAND_THROTTLE, request);
}
}
public void sendCommand(String command) {
sendCommand(command, "{}");
}
public void sendCommand(String command, String payLoad) {
Request request = new Request(command, payLoad, commandTarget);
if (stateThrottler != null) {
stateThrottler.submit(TESLA_COMMAND_THROTTLE, request);
}
}
public void sendCommand(String command, WebTarget target) {
Request request = new Request(command, "{}", target);
if (stateThrottler != null) {
stateThrottler.submit(TESLA_COMMAND_THROTTLE, request);
}
}
public void requestData(String command, String payLoad) {
Request request = new Request(command, payLoad, dataRequestTarget);
if (stateThrottler != null) {
stateThrottler.submit(TESLA_DATA_THROTTLE, request);
}
}
public void requestData(String command) {
requestData(command, null);
}
public void queryVehicle(String parameter) {
WebTarget target = vehicleTarget.path(parameter);
sendCommand(parameter, null, target);
}
protected String invokeAndParse(String command, String payLoad, WebTarget target) {
logger.debug("Invoking: {}", command);
if (vehicle.id != null) {
Response response;
if (payLoad != null) {
if (command != null) {
response = target.resolveTemplate("cmd", command).resolveTemplate("vid", vehicle.id).request()
.header("Authorization", "Bearer " + logonToken.access_token)
.post(Entity.entity(payLoad, MediaType.APPLICATION_JSON_TYPE));
} else {
response = target.resolveTemplate("vid", vehicle.id).request()
.header("Authorization", "Bearer " + logonToken.access_token)
.post(Entity.entity(payLoad, MediaType.APPLICATION_JSON_TYPE));
}
} else {
if (command != null) {
response = target.resolveTemplate("cmd", command).resolveTemplate("vid", vehicle.id)
.request(MediaType.APPLICATION_JSON_TYPE)
.header("Authorization", "Bearer " + logonToken.access_token).get();
} else {
response = target.resolveTemplate("vid", vehicle.id).request(MediaType.APPLICATION_JSON_TYPE)
.header("Authorization", "Bearer " + logonToken.access_token).get();
}
}
JsonParser parser = new JsonParser();
if (response != null && response.getStatus() == 200) {
try {
JsonObject jsonObject = parser.parse(response.readEntity(String.class)).getAsJsonObject();
logger.trace("Request : {}:{}:{} yields {}", new Object[] { command, payLoad, target.toString(),
jsonObject.get("response").toString() });
return jsonObject.get("response").toString();
} catch (Exception e) {
logger.error("An exception occurred while invoking a REST request : '{}'", e.getMessage());
}
} else {
logger.error("An error occurred while communicating with the vehicle during request {} : {}:{}",
new Object[] { command, (response != null) ? response.getStatus() : "",
(response != null) ? response.getStatusInfo() : "No Response" });
if (apiIntervalErrors == 0 && response != null && response.getStatus() == 401) {
authenticate();
}
apiIntervalErrors++;
if (apiIntervalErrors >= API_MAXIMUM_ERRORS_IN_INTERVAL) {
logger.warn("Reached the maximum number of errors ({}) for the current interval ({} seconds)",
API_MAXIMUM_ERRORS_IN_INTERVAL, API_ERROR_INTERVAL_SECONDS);
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR);
eventClient.close();
return null;
}
if ((System.currentTimeMillis() - apiIntervalTimestamp) > 1000 * API_ERROR_INTERVAL_SECONDS) {
logger.trace("Resetting the error counter. ({} errors in the last interval)", apiIntervalErrors);
apiIntervalTimestamp = System.currentTimeMillis();
apiIntervalErrors = 0;
}
}
}
return null;
}
public void parseAndUpdate(String request, String payLoad, String result) {
JsonParser parser = new JsonParser();
JsonObject jsonObject = null;
try {
if (request != null && result != null && !"null".equals(result)) {
// first, update state objects
switch (request) {
case TESLA_DRIVE_STATE: {
driveState = gson.fromJson(result, DriveState.class);
break;
}
case TESLA_GUI_STATE: {
guiState = gson.fromJson(result, GUIState.class);
break;
}
case TESLA_VEHICLE_STATE: {
vehicleState = gson.fromJson(result, VehicleState.class);
break;
}
case TESLA_CHARGE_STATE: {
chargeState = gson.fromJson(result, ChargeState.class);
if (chargeState.charging_state != null && "Charging".equals(chargeState.charging_state)) {
updateState(CHANNEL_CHARGE, OnOffType.ON);
} else {
updateState(CHANNEL_CHARGE, OnOffType.OFF);
}
break;
}
case TESLA_CLIMATE_STATE: {
climateState = gson.fromJson(result, ClimateState.class);
break;
}
}
// secondly, reformat the response string to a JSON compliant
// object for some specific non-JSON compatible requests
switch (request) {
case TESLA_MOBILE_ENABLED_STATE: {
jsonObject = new JsonObject();
jsonObject.addProperty(TESLA_MOBILE_ENABLED_STATE, result);
break;
}
default: {
jsonObject = parser.parse(result).getAsJsonObject();
break;
}
}
}
// process the result
if (jsonObject != null && result != null && !"null".equals(result)) {
// deal with responses for "set" commands, which get confirmed
// positively, or negatively, in which case a reason for failure
// is provided
if (jsonObject.get("reason") != null && jsonObject.get("reason").getAsString() != null) {
boolean requestResult = jsonObject.get("result").getAsBoolean();
logger.debug("The request ({}) execution was {}, and reported '{}'", new Object[] { request,
requestResult ? "successful" : "not successful", jsonObject.get("reason").getAsString() });
} else {
Set<Map.Entry<String, JsonElement>> entrySet = jsonObject.entrySet();
long resultTimeStamp = 0;
for (Map.Entry<String, JsonElement> entry : entrySet) {
if ("timestamp".equals(entry.getKey())) {
resultTimeStamp = Long.valueOf(entry.getValue().getAsString());
if (logger.isTraceEnabled()) {
Date date = new Date(resultTimeStamp);
SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.trace("The request result timestamp is {}", dateFormatter.format(date));
}
break;
}
}
try {
lock.lock();
boolean proceed = true;
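                    // Discard drive state responses that are older than the last processed timestamp so that stale
                    // data does not overwrite newer values coming from the event stream.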
                    if (resultTimeStamp < lastTimeStamp && TESLA_DRIVE_STATE.equals(request)) {
proceed = false;
}
if (proceed) {
for (Map.Entry<String, JsonElement> entry : entrySet) {
try {
TeslaChannelSelector selector = TeslaChannelSelector
.getValueSelectorFromRESTID(entry.getKey());
if (!selector.isProperty()) {
if (!entry.getValue().isJsonNull()) {
updateState(selector.getChannelID(), teslaChannelSelectorProxy.getState(
entry.getValue().getAsString(), selector, editProperties()));
} else {
updateState(selector.getChannelID(), UnDefType.UNDEF);
}
} else {
if (!entry.getValue().isJsonNull()) {
Map<String, String> properties = editProperties();
properties.put(selector.getChannelID(), entry.getValue().getAsString());
updateProperties(properties);
}
}
} catch (IllegalArgumentException e) {
logger.trace("The variable/value pair '{}':'{}' is not (yet) supported",
entry.getKey(), entry.getValue());
} catch (ClassCastException | IllegalStateException e) {
logger.trace("An exception occurred while converting the JSON data : '{}'",
e.getMessage(), e);
}
}
} else {
logger.warn("The result for request '{}' is discarded due to an out of sync timestamp",
request);
}
} finally {
lock.unlock();
}
}
}
} catch (Exception p) {
logger.error("An exception occurred while parsing data received from the vehicle: '{}'", p.getMessage());
}
}
protected boolean isAwake() {
return vehicle != null && !"asleep".equals(vehicle.state) && vehicle.vehicle_id != null;
}
protected boolean isOnline() {
return vehicle != null && "online".equals(vehicle.state) && vehicle.vehicle_id != null;
}
protected boolean isInMotion() {
if (driveState != null) {
if (driveState.speed != null && driveState.shift_state != null) {
return !"Undefined".equals(driveState.speed)
&& (!"P".equals(driveState.shift_state) || !"Undefined".equals(driveState.shift_state));
}
}
return false;
}
public void setChargeLimit(int percent) {
JsonObject payloadObject = new JsonObject();
payloadObject.addProperty("percent", percent);
sendCommand(TESLA_COMMAND_SET_CHARGE_LIMIT, gson.toJson(payloadObject), commandTarget);
requestData(TESLA_CHARGE_STATE);
}
public void setSunroof(String state) {
JsonObject payloadObject = new JsonObject();
payloadObject.addProperty("state", state);
sendCommand(TESLA_COMMAND_SUN_ROOF, gson.toJson(payloadObject), commandTarget);
requestData(TESLA_VEHICLE_STATE);
}
public void moveSunroof(int percent) {
JsonObject payloadObject = new JsonObject();
payloadObject.addProperty("state", "move");
payloadObject.addProperty("percent", percent);
sendCommand(TESLA_COMMAND_SUN_ROOF, gson.toJson(payloadObject), commandTarget);
requestData(TESLA_VEHICLE_STATE);
}
public void setTemperature(float temperature) {
JsonObject payloadObject = new JsonObject();
payloadObject.addProperty("driver_temp", temperature);
payloadObject.addProperty("passenger_temp", temperature);
sendCommand(TESLA_COMMAND_SET_TEMP, gson.toJson(payloadObject), commandTarget);
requestData(TESLA_CLIMATE_STATE);
}
public void setMaxRangeCharging(boolean b) {
if (b) {
sendCommand(TESLA_COMMAND_CHARGE_MAX, commandTarget);
} else {
sendCommand(TESLA_COMMAND_CHARGE_STD, commandTarget);
}
requestData(TESLA_CHARGE_STATE);
}
public void charge(boolean b) {
if (b) {
sendCommand(TESLA_COMMAND_CHARGE_START, commandTarget);
} else {
sendCommand(TESLA_COMMAND_CHARGE_STOP, commandTarget);
}
requestData(TESLA_CHARGE_STATE);
}
public void flashLights() {
sendCommand(TESLA_COMMAND_FLASH_LIGHTS, commandTarget);
}
public void honkHorn() {
sendCommand(TESLA_COMMAND_HONK_HORN, commandTarget);
}
public void openChargePort() {
sendCommand(TESLA_COMMAND_OPEN_CHARGE_PORT, commandTarget);
requestData(TESLA_CHARGE_STATE);
}
public void lockDoors(boolean b) {
if (b) {
sendCommand(TESLA_COMMAND_DOOR_LOCK, commandTarget);
} else {
sendCommand(TESLA_COMMAND_DOOR_UNLOCK, commandTarget);
}
requestData(TESLA_VEHICLE_STATE);
}
public void autoConditioning(boolean b) {
if (b) {
sendCommand(TESLA_COMMAND_AUTO_COND_START, commandTarget);
} else {
sendCommand(TESLA_COMMAND_AUTO_COND_STOP, commandTarget);
}
requestData(TESLA_CLIMATE_STATE);
}
public void wakeUp() {
sendCommand(TESLA_COMMAND_WAKE_UP);
}
protected Vehicle queryVehicle() {
// get a list of vehicles
Response response = vehiclesTarget.request(MediaType.APPLICATION_JSON_TYPE)
.header("Authorization", "Bearer " + logonToken.access_token).get();
logger.debug("Querying the vehicle : Response : {}:{}", response.getStatus(), response.getStatusInfo());
JsonParser parser = new JsonParser();
JsonObject jsonObject = parser.parse(response.readEntity(String.class)).getAsJsonObject();
Vehicle[] vehicleArray = gson.fromJson(jsonObject.getAsJsonArray("response"), Vehicle[].class);
for (int i = 0; i < vehicleArray.length; i++) {
logger.debug("Querying the vehicle : VIN : {}", vehicleArray[i].vin);
if (vehicleArray[i].vin.equals(getConfig().get(VIN))) {
vehicleJSON = gson.toJson(vehicleArray[i]);
parseAndUpdate("queryVehicle", null, vehicleJSON);
return vehicleArray[i];
}
}
return null;
}
private String getStorageKey() {
return this.getThing().getUID().getId();
}
private ThingStatusDetail authenticate() {
Storage<Object> storage = storageService.getStorage(TeslaBindingConstants.BINDING_ID);
String storedToken = (String) storage.get(getStorageKey());
TokenResponse token = storedToken == null ? null : gson.fromJson(storedToken, TokenResponse.class);
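        // Reuse the stored access token while it is still valid; otherwise fall back to username/password
        // or refresh-token authentication below.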
SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
boolean hasExpired = true;
if (token != null) {
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(token.created_at * 1000);
logger.info("Found a request token created at {}", dateFormatter.format(calendar.getTime()));
            calendar.setTimeInMillis(token.created_at * 1000 + token.expires_in * 1000); // created_at and expires_in are expressed in seconds
Date now = new Date();
if (calendar.getTime().before(now)) {
logger.info("The token has expired at {}", dateFormatter.format(calendar.getTime()));
hasExpired = true;
} else {
hasExpired = false;
}
}
String username = (String) getConfig().get(USERNAME);
if (!StringUtils.isEmpty(username) && hasExpired) {
String password = (String) getConfig().get(PASSWORD);
return authenticate(username, password);
}
if (token == null || StringUtils.isEmpty(token.refresh_token)) {
return ThingStatusDetail.CONFIGURATION_ERROR;
}
TokenRequestRefreshToken tokenRequest = null;
try {
tokenRequest = new TokenRequestRefreshToken(token.refresh_token);
} catch (GeneralSecurityException e) {
logger.error("An exception occurred while requesting a new token : '{}'", e.getMessage(), e);
}
String payLoad = gson.toJson(tokenRequest);
Response response = tokenTarget.request().post(Entity.entity(payLoad, MediaType.APPLICATION_JSON_TYPE));
if (response == null) {
logger.debug("Authenticating : Response was null");
} else {
logger.debug("Authenticating : Response : {}:{}", response.getStatus(), response.getStatusInfo());
if (response.getStatus() == 200 && response.hasEntity()) {
String responsePayLoad = response.readEntity(String.class);
TokenResponse tokenResponse = gson.fromJson(responsePayLoad.trim(), TokenResponse.class);
if (tokenResponse != null && !StringUtils.isEmpty(tokenResponse.access_token)) {
storage.put(getStorageKey(), gson.toJson(tokenResponse));
this.logonToken = tokenResponse;
return ThingStatusDetail.NONE;
}
return ThingStatusDetail.NONE;
} else if (response.getStatus() == 401) {
if (!StringUtils.isEmpty(username)) {
String password = (String) getConfig().get(PASSWORD);
return authenticate(username, password);
} else {
return ThingStatusDetail.CONFIGURATION_ERROR;
}
} else if (response.getStatus() == 503 || response.getStatus() == 502) {
return ThingStatusDetail.COMMUNICATION_ERROR;
}
}
return ThingStatusDetail.CONFIGURATION_ERROR;
}
private ThingStatusDetail authenticate(String username, String password) {
TokenRequest token = null;
try {
token = new TokenRequestPassword(username, password);
} catch (GeneralSecurityException e) {
logger.error("An exception occurred while building a password request token : '{}'", e.getMessage(), e);
}
if (token != null) {
String payLoad = gson.toJson(token);
Response response = tokenTarget.request().post(Entity.entity(payLoad, MediaType.APPLICATION_JSON_TYPE));
if (response != null) {
logger.debug("Authenticating : Response : {}:{}", response.getStatus(), response.getStatusInfo());
if (response.getStatus() == 200 && response.hasEntity()) {
String responsePayLoad = response.readEntity(String.class);
TokenResponse tokenResponse = gson.fromJson(responsePayLoad.trim(), TokenResponse.class);
if (StringUtils.isNotEmpty(tokenResponse.access_token)) {
Storage<Object> storage = storageService.getStorage(TeslaBindingConstants.BINDING_ID);
storage.put(getStorageKey(), gson.toJson(tokenResponse));
this.logonToken = tokenResponse;
return ThingStatusDetail.NONE;
}
} else if (response.getStatus() == 401) {
return ThingStatusDetail.CONFIGURATION_ERROR;
} else if (response.getStatus() == 503 || response.getStatus() == 502) {
return ThingStatusDetail.COMMUNICATION_ERROR;
}
}
}
return ThingStatusDetail.CONFIGURATION_ERROR;
}
protected Runnable fastStateRunnable = () -> {
if (getThing().getStatus() == ThingStatus.ONLINE) {
if (isAwake()) {
requestData(TESLA_DRIVE_STATE);
requestData(TESLA_VEHICLE_STATE);
} else {
if (vehicle != null && allowWakeUp) {
wakeUp();
} else {
vehicle = queryVehicle();
}
}
}
if (allowWakeUp) {
updateState(CHANNEL_ALLOWWAKEUP, OnOffType.ON);
} else {
updateState(CHANNEL_ALLOWWAKEUP, OnOffType.OFF);
}
if (eventThread != null) {
updateState(CHANNEL_ENABLEEVENTS, OnOffType.ON);
} else {
updateState(CHANNEL_ENABLEEVENTS, OnOffType.OFF);
}
};
protected Runnable slowStateRunnable = () -> {
if (getThing().getStatus() == ThingStatus.ONLINE) {
if (isAwake()) {
requestData(TESLA_CHARGE_STATE);
requestData(TESLA_CLIMATE_STATE);
requestData(TESLA_GUI_STATE);
queryVehicle(TESLA_MOBILE_ENABLED_STATE);
parseAndUpdate("queryVehicle", null, vehicleJSON);
} else {
if (vehicle != null && allowWakeUp) {
wakeUp();
} else {
vehicle = queryVehicle();
}
}
}
};
protected Runnable connectRunnable = () -> {
try {
lock.lock();
if (getThing().getStatus() != ThingStatus.ONLINE) {
logger.debug("Setting up an authenticated connection to the Tesla back-end");
ThingStatusDetail authenticationResult = authenticate();
if (authenticationResult != ThingStatusDetail.NONE) {
updateStatus(ThingStatus.OFFLINE, authenticationResult);
} else {
// get a list of vehicles
Response response = vehiclesTarget.request(MediaType.APPLICATION_JSON_TYPE)
.header("Authorization", "Bearer " + logonToken.access_token).get();
if (response != null && response.getStatus() == 200 && response.hasEntity()) {
if ((vehicle = queryVehicle()) != null) {
logger.debug("Found the vehicle with VIN '{}' in the list of vehicles you own",
getConfig().get(VIN));
updateStatus(ThingStatus.ONLINE);
apiIntervalErrors = 0;
apiIntervalTimestamp = System.currentTimeMillis();
} else {
logger.warn("Unable to find the vehicle with VIN '{}' in the list of vehicles you own",
getConfig().get(VIN));
updateStatus(ThingStatus.OFFLINE);
}
} else {
if (response != null) {
logger.error("Error fetching the list of vehicles : {}:{}", response.getStatus(),
response.getStatusInfo());
updateStatus(ThingStatus.OFFLINE);
}
}
}
}
} catch (Exception e) {
logger.error("An exception occurred while connecting to the Tesla back-end: '{}'", e.getMessage());
} finally {
lock.unlock();
}
};
protected Runnable eventRunnable = new Runnable() {
Response eventResponse;
BufferedReader eventBufferedReader;
InputStreamReader eventInputStreamReader;
boolean isEstablished = false;
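        // Connects to the Tesla streaming endpoint for this vehicle and wraps the response in a reader,
        // counting failed attempts within the configured error interval.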
protected boolean establishEventStream() {
try {
if (!isEstablished) {
eventBufferedReader = null;
eventClient = ClientBuilder.newClient()
.property(ClientProperties.CONNECT_TIMEOUT, EVENT_STREAM_CONNECT_TIMEOUT)
.property(ClientProperties.READ_TIMEOUT, EVENT_STREAM_READ_TIMEOUT)
.register(new Authenticator((String) getConfig().get(USERNAME), vehicle.tokens[0]));
eventTarget = eventClient.target(TESLA_EVENT_URI).path(vehicle.vehicle_id + "/").queryParam(
"values", StringUtils.join(EventKeys.values(), ',', 1, EventKeys.values().length));
eventResponse = eventTarget.request(MediaType.TEXT_PLAIN_TYPE).get();
logger.debug("Event Stream : Establishing the event stream : Response : {}:{}",
eventResponse.getStatus(), eventResponse.getStatusInfo());
if (eventResponse.getStatus() == 200) {
InputStream dummy = (InputStream) eventResponse.getEntity();
eventInputStreamReader = new InputStreamReader(dummy);
eventBufferedReader = new BufferedReader(eventInputStreamReader);
isEstablished = true;
} else if (eventResponse.getStatus() == 401) {
isEstablished = false;
} else {
isEstablished = false;
}
if (!isEstablished) {
eventIntervalErrors++;
if (eventIntervalErrors >= EVENT_MAXIMUM_ERRORS_IN_INTERVAL) {
logger.warn(
"Reached the maximum number of errors ({}) for the current interval ({} seconds)",
EVENT_MAXIMUM_ERRORS_IN_INTERVAL, EVENT_ERROR_INTERVAL_SECONDS);
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR);
eventClient.close();
}
if ((System.currentTimeMillis() - eventIntervalTimestamp) > 1000
* EVENT_ERROR_INTERVAL_SECONDS) {
logger.trace("Resetting the error counter. ({} errors in the last interval)",
eventIntervalErrors);
eventIntervalTimestamp = System.currentTimeMillis();
eventIntervalErrors = 0;
}
}
}
} catch (Exception e) {
logger.error(
"Event Stream : An exception occurred while establishing the event stream for the vehicle: '{}'",
e.getMessage());
isEstablished = false;
}
return isEstablished;
}
@Override
public void run() {
while (true) {
try {
if (getThing().getStatus() == ThingStatus.ONLINE) {
if (isAwake()) {
if (establishEventStream()) {
String line = eventBufferedReader.readLine();
while (line != null) {
logger.debug("Event Stream : Received an event: '{}'", line);
String[] vals = line.split(",");
long currentTimeStamp = Long.valueOf(vals[0]);
long systemTimeStamp = System.currentTimeMillis();
if (logger.isDebugEnabled()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.debug("STS {} CTS {} Delta {}",
dateFormatter.format(new Date(systemTimeStamp)),
dateFormatter.format(new Date(currentTimeStamp)),
systemTimeStamp - currentTimeStamp);
}
if (systemTimeStamp - currentTimeStamp < EVENT_TIMESTAMP_AGE_LIMIT) {
if (currentTimeStamp > lastTimeStamp) {
lastTimeStamp = Long.valueOf(vals[0]);
if (logger.isDebugEnabled()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.debug("Event Stream : Event stamp is {}",
dateFormatter.format(new Date(lastTimeStamp)));
}
for (int i = 0; i < EventKeys.values().length; i++) {
TeslaChannelSelector selector = TeslaChannelSelector
.getValueSelectorFromRESTID((EventKeys.values()[i]).toString());
if (!selector.isProperty()) {
State newState = teslaChannelSelectorProxy.getState(vals[i],
selector, editProperties());
if (newState != null && !"".equals(vals[i])) {
updateState(selector.getChannelID(), newState);
} else {
updateState(selector.getChannelID(), UnDefType.UNDEF);
}
} else {
Map<String, String> properties = editProperties();
properties.put(selector.getChannelID(),
(selector.getState(vals[i])).toString());
updateProperties(properties);
}
}
} else {
if (logger.isDebugEnabled()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.debug(
"Event Stream : Discarding an event with an out of sync timestamp {} (last is {})",
dateFormatter.format(new Date(currentTimeStamp)),
dateFormatter.format(new Date(lastTimeStamp)));
}
}
} else {
if (logger.isDebugEnabled()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.debug(
"Event Stream : Discarding an event that differs {} ms from the system time: {} (system is {})",
systemTimeStamp - currentTimeStamp,
dateFormatter.format(currentTimeStamp),
dateFormatter.format(systemTimeStamp));
}
if (systemTimeStamp - currentTimeStamp > EVENT_TIMESTAMP_MAX_DELTA) {
if (logger.isTraceEnabled()) {
logger.trace("Event Stream : The event stream will be reset");
}
isEstablished = false;
}
}
line = eventBufferedReader.readLine();
}
if (line == null) {
if (logger.isTraceEnabled()) {
logger.trace("Event Stream : The end of stream was reached");
}
isEstablished = false;
}
}
} else {
logger.debug("Event stream : The vehicle is not awake");
if (vehicle != null && allowWakeUp) {
// wake up the vehicle until the streaming token is non-zero
logger.debug("Event stream : Waking up the vehicle");
wakeUp();
} else {
logger.debug("Event stream : Querying the vehicle");
vehicle = queryVehicle();
}
}
} else {
Thread.sleep(250);
}
} catch (IOException | NumberFormatException e) {
if (logger.isErrorEnabled()) {
logger.error("Event Stream : An exception occurred while reading events : '{}'",
e.getMessage());
}
isEstablished = false;
} catch (InterruptedException e) {
isEstablished = false;
}
if (Thread.interrupted()) {
logger.debug("Event Stream : the Event Stream was interrupted");
return;
}
}
}
};
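// A single REST request to the vehicle, submitted through the throttler; the response is
// parsed and pushed to the channels via parseAndUpdate().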
protected class Request implements Runnable {
private String request;
private String payLoad;
private WebTarget target;
public Request(String request, String payLoad, WebTarget target) {
this.request = request;
this.payLoad = payLoad;
this.target = target;
}
@Override
public void run() {
try {
String result = "";
if (isAwake() && getThing().getStatus() == ThingStatus.ONLINE) {
result = invokeAndParse(request, payLoad, target);
}
if (result != null && !"".equals(result)) {
parseAndUpdate(request, payLoad, result);
}
} catch (Exception e) {
logger.error("An exception occurred while executing a request to the vehicle: '{}'", e.getMessage());
}
}
}
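// Client filter that adds an HTTP Basic Authorization header; used for the streaming endpoint.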
protected class Authenticator implements ClientRequestFilter {
private final String user;
private final String password;
public Authenticator(String user, String password) {
this.user = user;
this.password = password;
}
@Override
public void filter(ClientRequestContext requestContext) throws IOException {
MultivaluedMap<String, Object> headers = requestContext.getHeaders();
final String basicAuthentication = getBasicAuthentication();
headers.add("Authorization", basicAuthentication);
}
private String getBasicAuthentication() {
String token = this.user + ":" + this.password;
try {
return "Basic " + DatatypeConverter.printBase64Binary(token.getBytes("UTF-8"));
} catch (UnsupportedEncodingException ex) {
throw new IllegalStateException("Cannot encode with UTF-8", ex);
}
}
}
}
| addons/binding/org.openhab.binding.tesla/src/main/java/org/openhab/binding/tesla/handler/TeslaHandler.java | /**
* Copyright (c) 2010-2018 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.tesla.handler;
import static org.openhab.binding.tesla.TeslaBindingConstants.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.security.GeneralSecurityException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.xml.bind.DatatypeConverter;
import org.apache.commons.lang.StringUtils;
import org.eclipse.smarthome.core.library.types.DecimalType;
import org.eclipse.smarthome.core.library.types.IncreaseDecreaseType;
import org.eclipse.smarthome.core.library.types.OnOffType;
import org.eclipse.smarthome.core.library.types.PercentType;
import org.eclipse.smarthome.core.library.types.StringType;
import org.eclipse.smarthome.core.storage.Storage;
import org.eclipse.smarthome.core.storage.StorageService;
import org.eclipse.smarthome.core.thing.ChannelUID;
import org.eclipse.smarthome.core.thing.Thing;
import org.eclipse.smarthome.core.thing.ThingStatus;
import org.eclipse.smarthome.core.thing.ThingStatusDetail;
import org.eclipse.smarthome.core.thing.binding.BaseThingHandler;
import org.eclipse.smarthome.core.types.Command;
import org.eclipse.smarthome.core.types.RefreshType;
import org.eclipse.smarthome.core.types.State;
import org.eclipse.smarthome.core.types.UnDefType;
import org.glassfish.jersey.client.ClientProperties;
import org.openhab.binding.tesla.TeslaBindingConstants;
import org.openhab.binding.tesla.TeslaBindingConstants.EventKeys;
import org.openhab.binding.tesla.internal.TeslaChannelSelectorProxy;
import org.openhab.binding.tesla.internal.TeslaChannelSelectorProxy.TeslaChannelSelector;
import org.openhab.binding.tesla.internal.protocol.ChargeState;
import org.openhab.binding.tesla.internal.protocol.ClimateState;
import org.openhab.binding.tesla.internal.protocol.DriveState;
import org.openhab.binding.tesla.internal.protocol.GUIState;
import org.openhab.binding.tesla.internal.protocol.TokenRequest;
import org.openhab.binding.tesla.internal.protocol.TokenRequestPassword;
import org.openhab.binding.tesla.internal.protocol.TokenRequestRefreshToken;
import org.openhab.binding.tesla.internal.protocol.TokenResponse;
import org.openhab.binding.tesla.internal.protocol.Vehicle;
import org.openhab.binding.tesla.internal.protocol.VehicleState;
import org.openhab.binding.tesla.internal.throttler.QueueChannelThrottler;
import org.openhab.binding.tesla.internal.throttler.Rate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
/**
* The {@link TeslaHandler} is responsible for handling commands, which are sent
* to one of the channels.
*
* @author Karel Goderis - Initial contribution
* @author Nicolai Grødum - Adding token based auth
*/
public class TeslaHandler extends BaseThingHandler {
private static final int EVENT_STREAM_CONNECT_TIMEOUT = 3000;
private static final int EVENT_STREAM_READ_TIMEOUT = 200000;
private static final int EVENT_TIMESTAMP_AGE_LIMIT = 3000;
private static final int EVENT_TIMESTAMP_MAX_DELTA = 10000;
private static final int FAST_STATUS_REFRESH_INTERVAL = 15000;
private static final int SLOW_STATUS_REFRESH_INTERVAL = 60000;
private static final int CONNECT_RETRY_INTERVAL = 15000;
private static final int MAXIMUM_ERRORS_IN_INTERVAL = 2;
private static final int ERROR_INTERVAL_SECONDS = 15;
private final Logger logger = LoggerFactory.getLogger(TeslaHandler.class);
// Vehicle state variables
protected Vehicle vehicle;
protected String vehicleJSON;
protected DriveState driveState;
protected GUIState guiState;
protected VehicleState vehicleState;
protected ChargeState chargeState;
protected ClimateState climateState;
// REST Client API variables
protected final Client teslaClient = ClientBuilder.newClient();
protected Client eventClient = ClientBuilder.newClient();
public final WebTarget teslaTarget = teslaClient.target(TESLA_OWNERS_URI);
public final WebTarget tokenTarget = teslaTarget.path(TESLA_ACCESS_TOKEN_URI);
public final WebTarget vehiclesTarget = teslaTarget.path(API_VERSION).path(VEHICLES);
public final WebTarget vehicleTarget = vehiclesTarget.path(VEHICLE_ID_PATH);
public final WebTarget dataRequestTarget = vehicleTarget.path(DATA_REQUEST_PATH);
public final WebTarget commandTarget = vehicleTarget.path(COMMAND_PATH);
protected WebTarget eventTarget;
// Threading and Job related variables
protected ScheduledFuture<?> connectJob;
protected Thread eventThread;
protected ScheduledFuture<?> fastStateJob;
protected ScheduledFuture<?> slowStateJob;
protected QueueChannelThrottler stateThrottler;
protected boolean allowWakeUp = true;
protected long lastTimeStamp;
protected long intervalTimestamp = 0;
protected int intervalErrors = 0;
protected ReentrantLock lock;
private StorageService storageService;
protected Gson gson = new Gson();
protected TeslaChannelSelectorProxy teslaChannelSelectorProxy = new TeslaChannelSelectorProxy();
private TokenResponse logonToken;
public TeslaHandler(Thing thing, StorageService storageService) {
super(thing);
this.storageService = storageService;
}
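// Schedules the connect job and the fast/slow polling jobs, starts the event stream thread
// and sets up the request throttler.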
@Override
public void initialize() {
logger.trace("Initializing the Tesla handler for {}", this.getStorageKey());
updateStatus(ThingStatus.UNKNOWN);
lock = new ReentrantLock();
lock.lock();
try {
if (connectJob == null || connectJob.isCancelled()) {
connectJob = scheduler.scheduleWithFixedDelay(connectRunnable, 0, CONNECT_RETRY_INTERVAL,
TimeUnit.MILLISECONDS);
}
eventThread = new Thread(eventRunnable, "ESH-Tesla-Event Stream-" + getThing().getUID());
eventThread.start();
Map<Object, Rate> channels = new HashMap<>();
channels.put(TESLA_DATA_THROTTLE, new Rate(1, 1, TimeUnit.SECONDS));
channels.put(TESLA_COMMAND_THROTTLE, new Rate(20, 1, TimeUnit.MINUTES));
Rate firstRate = new Rate(20, 1, TimeUnit.MINUTES);
Rate secondRate = new Rate(200, 10, TimeUnit.MINUTES);
stateThrottler = new QueueChannelThrottler(firstRate, scheduler, channels);
stateThrottler.addRate(secondRate);
if (fastStateJob == null || fastStateJob.isCancelled()) {
fastStateJob = scheduler.scheduleWithFixedDelay(fastStateRunnable, 0, FAST_STATUS_REFRESH_INTERVAL,
TimeUnit.MILLISECONDS);
}
if (slowStateJob == null || slowStateJob.isCancelled()) {
slowStateJob = scheduler.scheduleWithFixedDelay(slowStateRunnable, 0, SLOW_STATUS_REFRESH_INTERVAL,
TimeUnit.MILLISECONDS);
}
} finally {
lock.unlock();
}
}
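// Cancels the scheduled jobs and interrupts the event stream thread.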
@Override
public void dispose() {
logger.trace("Disposing the Tesla handler for {}", getThing().getUID());
lock.lock();
try {
if (fastStateJob != null && !fastStateJob.isCancelled()) {
fastStateJob.cancel(true);
fastStateJob = null;
}
if (slowStateJob != null && !slowStateJob.isCancelled()) {
slowStateJob.cancel(true);
slowStateJob = null;
}
if (!eventThread.isInterrupted()) {
eventThread.interrupt();
eventThread = null;
}
if (connectJob != null && !connectJob.isCancelled()) {
connectJob.cancel(true);
connectJob = null;
}
} finally {
lock.unlock();
}
}
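// Translates channel commands (charge limit, temperature, sunroof, locks, HVAC, wake-up, ...)
// into the corresponding REST commands; a REFRESH command re-requests all known state.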
@Override
public void handleCommand(ChannelUID channelUID, Command command) {
String channelID = channelUID.getId();
TeslaChannelSelector selector = TeslaChannelSelector.getValueSelectorFromChannelID(channelID);
if (command instanceof RefreshType) {
if (isAwake()) {
// Request the state of all known variables. This is sub-optimal, but the requests get scheduled and
// throttled so we are safe not to break the Tesla SLA
requestData(TESLA_DRIVE_STATE);
requestData(TESLA_VEHICLE_STATE);
requestData(TESLA_CHARGE_STATE);
requestData(TESLA_CLIMATE_STATE);
requestData(TESLA_GUI_STATE);
}
} else {
if (selector != null) {
try {
switch (selector) {
case CHARGE_LIMIT_SOC: {
if (command instanceof PercentType) {
setChargeLimit(((PercentType) command).intValue());
} else if (command instanceof OnOffType && command == OnOffType.ON) {
setChargeLimit(100);
} else if (command instanceof OnOffType && command == OnOffType.OFF) {
setChargeLimit(0);
} else if (command instanceof IncreaseDecreaseType
&& command == IncreaseDecreaseType.INCREASE) {
setChargeLimit(Math.min(chargeState.charge_limit_soc + 1, 100));
} else if (command instanceof IncreaseDecreaseType
&& command == IncreaseDecreaseType.DECREASE) {
setChargeLimit(Math.max(chargeState.charge_limit_soc - 1, 0));
}
break;
}
case TEMPERATURE: {
if (command instanceof DecimalType) {
if (getThing().getProperties().containsKey("temperatureunits")
&& getThing().getProperties().get("temperatureunits").equals("F")) {
float fTemp = ((DecimalType) command).floatValue();
float cTemp = ((fTemp - 32.0f) * 5.0f / 9.0f);
setTemperature(cTemp);
} else {
setTemperature(((DecimalType) command).floatValue());
}
}
break;
}
case SUN_ROOF_STATE: {
if (command instanceof StringType) {
setSunroof(command.toString());
}
break;
}
case SUN_ROOF: {
if (command instanceof PercentType) {
moveSunroof(((PercentType) command).intValue());
} else if (command instanceof OnOffType && command == OnOffType.ON) {
moveSunroof(100);
} else if (command instanceof OnOffType && command == OnOffType.OFF) {
moveSunroof(0);
} else if (command instanceof IncreaseDecreaseType
&& command == IncreaseDecreaseType.INCREASE) {
moveSunroof(Math.min(chargeState.charge_limit_soc + 1, 100));
} else if (command instanceof IncreaseDecreaseType
&& command == IncreaseDecreaseType.DECREASE) {
moveSunroof(Math.max(chargeState.charge_limit_soc - 1, 0));
}
break;
}
case CHARGE_TO_MAX: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
setMaxRangeCharging(true);
} else {
setMaxRangeCharging(false);
}
}
break;
}
case CHARGE: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
charge(true);
} else {
charge(false);
}
}
break;
}
case FLASH: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
flashLights();
}
}
break;
}
case HONK_HORN: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
honkHorn();
}
}
break;
}
case CHARGEPORT: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
openChargePort();
}
}
break;
}
case DOOR_LOCK: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
lockDoors(true);
} else {
lockDoors(false);
}
}
break;
}
case AUTO_COND: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
autoConditioning(true);
} else {
autoConditioning(false);
}
}
break;
}
case WAKEUP: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
wakeUp();
}
}
break;
}
case ALLOWWAKEUP: {
if (command instanceof OnOffType) {
allowWakeUp = (((OnOffType) command) == OnOffType.ON);
}
break;
}
case ENABLEEVENTS: {
if (command instanceof OnOffType) {
if (((OnOffType) command) == OnOffType.ON) {
if (eventThread == null) {
eventThread = new Thread(eventRunnable,
"ESH-Tesla-Event Stream-" + getThing().getUID());
eventThread.start();
}
} else {
if (eventThread != null) {
eventThread.interrupt();
eventThread = null;
}
}
}
break;
}
default:
break;
}
return;
} catch (IllegalArgumentException e) {
logger.warn(
"An error occurred while trying to set the read-only variable associated with channel '{}' to '{}'",
channelID, command.toString());
}
}
}
}
public void sendCommand(String command, String payLoad, WebTarget target) {
Request request = new Request(command, payLoad, target);
if (stateThrottler != null) {
stateThrottler.submit(TESLA_COMMAND_THROTTLE, request);
}
}
public void sendCommand(String command) {
sendCommand(command, "{}");
}
public void sendCommand(String command, String payLoad) {
Request request = new Request(command, payLoad, commandTarget);
if (stateThrottler != null) {
stateThrottler.submit(TESLA_COMMAND_THROTTLE, request);
}
}
public void sendCommand(String command, WebTarget target) {
Request request = new Request(command, "{}", target);
if (stateThrottler != null) {
stateThrottler.submit(TESLA_COMMAND_THROTTLE, request);
}
}
public void requestData(String command, String payLoad) {
Request request = new Request(command, payLoad, dataRequestTarget);
if (stateThrottler != null) {
stateThrottler.submit(TESLA_DATA_THROTTLE, request);
}
}
public void requestData(String command) {
requestData(command, null);
}
public void queryVehicle(String parameter) {
WebTarget target = vehicleTarget.path(parameter);
sendCommand(parameter, null, target);
}
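// Executes the REST call against the Owner API and returns the "response" element of the
// JSON payload; consecutive errors are counted and eventually take the thing offline.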
protected String invokeAndParse(String command, String payLoad, WebTarget target) {
logger.debug("Invoking: {}", command);
if (vehicle.id != null) {
Response response;
if (payLoad != null) {
if (command != null) {
response = target.resolveTemplate("cmd", command).resolveTemplate("vid", vehicle.id).request()
.header("Authorization", "Bearer " + logonToken.access_token)
.post(Entity.entity(payLoad, MediaType.APPLICATION_JSON_TYPE));
} else {
response = target.resolveTemplate("vid", vehicle.id).request()
.header("Authorization", "Bearer " + logonToken.access_token)
.post(Entity.entity(payLoad, MediaType.APPLICATION_JSON_TYPE));
}
} else {
if (command != null) {
response = target.resolveTemplate("cmd", command).resolveTemplate("vid", vehicle.id)
.request(MediaType.APPLICATION_JSON_TYPE)
.header("Authorization", "Bearer " + logonToken.access_token).get();
} else {
response = target.resolveTemplate("vid", vehicle.id).request(MediaType.APPLICATION_JSON_TYPE)
.header("Authorization", "Bearer " + logonToken.access_token).get();
}
}
JsonParser parser = new JsonParser();
if (response != null && response.getStatus() == 200) {
try {
JsonObject jsonObject = parser.parse(response.readEntity(String.class)).getAsJsonObject();
logger.trace("Request : {}:{}:{} yields {}", new Object[] { command, payLoad, target.toString(),
jsonObject.get("response").toString() });
return jsonObject.get("response").toString();
} catch (Exception e) {
logger.error("An exception occurred while invoking a REST request : '{}'", e.getMessage());
}
} else {
logger.error("An error occurred while communicating with the vehicle during request {} : {}:{}",
new Object[] { command, (response != null) ? response.getStatus() : "",
(response != null) ? response.getStatusInfo() : "No Response" });
if (intervalErrors == 0 && response != null && response.getStatus() == 401) {
authenticate();
}
intervalErrors++;
if (intervalErrors >= MAXIMUM_ERRORS_IN_INTERVAL) {
logger.warn("Reached the maximum number of errors ({}) for the current interval ({} seconds)",
MAXIMUM_ERRORS_IN_INTERVAL, ERROR_INTERVAL_SECONDS);
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR);
eventClient.close();
return null;
}
if ((System.currentTimeMillis() - intervalTimestamp) > 1000 * ERROR_INTERVAL_SECONDS) {
logger.trace("Resetting the error counter. ({} errors in the last interval)", intervalErrors);
intervalTimestamp = System.currentTimeMillis();
intervalErrors = 0;
}
}
}
return null;
}
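// Parses a JSON result, updates the cached state objects and pushes every variable/value
// pair to the matching channel or thing property, skipping out-of-sync drive state results.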
public void parseAndUpdate(String request, String payLoad, String result) {
JsonParser parser = new JsonParser();
JsonObject jsonObject = null;
try {
if (request != null && result != null && !"null".equals(result)) {
// first, update state objects
switch (request) {
case TESLA_DRIVE_STATE: {
driveState = gson.fromJson(result, DriveState.class);
break;
}
case TESLA_GUI_STATE: {
guiState = gson.fromJson(result, GUIState.class);
break;
}
case TESLA_VEHICLE_STATE: {
vehicleState = gson.fromJson(result, VehicleState.class);
break;
}
case TESLA_CHARGE_STATE: {
chargeState = gson.fromJson(result, ChargeState.class);
if (chargeState.charging_state != null && "Charging".equals(chargeState.charging_state)) {
updateState(CHANNEL_CHARGE, OnOffType.ON);
} else {
updateState(CHANNEL_CHARGE, OnOffType.OFF);
}
break;
}
case TESLA_CLIMATE_STATE: {
climateState = gson.fromJson(result, ClimateState.class);
break;
}
}
// secondly, reformat the response string to a JSON compliant
// object for some specific non-JSON compatible requests
switch (request) {
case TESLA_MOBILE_ENABLED_STATE: {
jsonObject = new JsonObject();
jsonObject.addProperty(TESLA_MOBILE_ENABLED_STATE, result);
break;
}
default: {
jsonObject = parser.parse(result).getAsJsonObject();
break;
}
}
}
// process the result
if (jsonObject != null && result != null && !"null".equals(result)) {
// deal with responses for "set" commands, which get confirmed
// positively, or negatively, in which case a reason for failure
// is provided
if (jsonObject.get("reason") != null && jsonObject.get("reason").getAsString() != null) {
boolean requestResult = jsonObject.get("result").getAsBoolean();
logger.debug("The request ({}) execution was {}, and reported '{}'", new Object[] { request,
requestResult ? "successful" : "not successful", jsonObject.get("reason").getAsString() });
} else {
Set<Map.Entry<String, JsonElement>> entrySet = jsonObject.entrySet();
long resultTimeStamp = 0;
for (Map.Entry<String, JsonElement> entry : entrySet) {
if ("timestamp".equals(entry.getKey())) {
resultTimeStamp = Long.valueOf(entry.getValue().getAsString());
if (logger.isTraceEnabled()) {
Date date = new Date(resultTimeStamp);
SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.trace("The request result timestamp is {}", dateFormatter.format(date));
}
break;
}
}
try {
lock.lock();
boolean proceed = true;
if (resultTimeStamp < lastTimeStamp && TESLA_DRIVE_STATE.equals(request)) {
proceed = false;
}
if (proceed) {
for (Map.Entry<String, JsonElement> entry : entrySet) {
try {
TeslaChannelSelector selector = TeslaChannelSelector
.getValueSelectorFromRESTID(entry.getKey());
if (!selector.isProperty()) {
if (!entry.getValue().isJsonNull()) {
updateState(selector.getChannelID(), teslaChannelSelectorProxy.getState(
entry.getValue().getAsString(), selector, editProperties()));
} else {
updateState(selector.getChannelID(), UnDefType.UNDEF);
}
} else {
if (!entry.getValue().isJsonNull()) {
Map<String, String> properties = editProperties();
properties.put(selector.getChannelID(), entry.getValue().getAsString());
updateProperties(properties);
}
}
} catch (IllegalArgumentException e) {
logger.trace("The variable/value pair '{}':'{}' is not (yet) supported",
entry.getKey(), entry.getValue());
} catch (ClassCastException | IllegalStateException e) {
logger.trace("An exception occurred while converting the JSON data : '{}'",
e.getMessage(), e);
}
}
} else {
logger.warn("The result for request '{}' is discarded due to an out of sync timestamp",
request);
}
} finally {
lock.unlock();
}
}
}
} catch (Exception p) {
logger.error("An exception occurred while parsing data received from the vehicle: '{}'", p.getMessage());
}
}
protected boolean isAwake() {
return vehicle != null && !"asleep".equals(vehicle.state) && vehicle.vehicle_id != null;
}
protected boolean isOnline() {
return vehicle != null && "online".equals(vehicle.state) && vehicle.vehicle_id != null;
}
protected boolean isInMotion() {
if (driveState != null) {
if (driveState.speed != null && driveState.shift_state != null) {
return !"Undefined".equals(driveState.speed)
&& (!"P".equals(driveState.shift_state) || !"Undefined".equals(driveState.shift_state));
}
}
return false;
}
public void setChargeLimit(int percent) {
JsonObject payloadObject = new JsonObject();
payloadObject.addProperty("percent", percent);
sendCommand(TESLA_COMMAND_SET_CHARGE_LIMIT, gson.toJson(payloadObject), commandTarget);
requestData(TESLA_CHARGE_STATE);
}
public void setSunroof(String state) {
JsonObject payloadObject = new JsonObject();
payloadObject.addProperty("state", state);
sendCommand(TESLA_COMMAND_SUN_ROOF, gson.toJson(payloadObject), commandTarget);
requestData(TESLA_VEHICLE_STATE);
}
public void moveSunroof(int percent) {
JsonObject payloadObject = new JsonObject();
payloadObject.addProperty("state", "move");
payloadObject.addProperty("percent", percent);
sendCommand(TESLA_COMMAND_SUN_ROOF, gson.toJson(payloadObject), commandTarget);
requestData(TESLA_VEHICLE_STATE);
}
public void setTemperature(float temperature) {
JsonObject payloadObject = new JsonObject();
payloadObject.addProperty("driver_temp", temperature);
payloadObject.addProperty("passenger_temp", temperature);
sendCommand(TESLA_COMMAND_SET_TEMP, gson.toJson(payloadObject), commandTarget);
requestData(TESLA_CLIMATE_STATE);
}
public void setMaxRangeCharging(boolean b) {
if (b) {
sendCommand(TESLA_COMMAND_CHARGE_MAX, commandTarget);
} else {
sendCommand(TESLA_COMMAND_CHARGE_STD, commandTarget);
}
requestData(TESLA_CHARGE_STATE);
}
public void charge(boolean b) {
if (b) {
sendCommand(TESLA_COMMAND_CHARGE_START, commandTarget);
} else {
sendCommand(TESLA_COMMAND_CHARGE_STOP, commandTarget);
}
requestData(TESLA_CHARGE_STATE);
}
public void flashLights() {
sendCommand(TESLA_COMMAND_FLASH_LIGHTS, commandTarget);
}
public void honkHorn() {
sendCommand(TESLA_COMMAND_HONK_HORN, commandTarget);
}
public void openChargePort() {
sendCommand(TESLA_COMMAND_OPEN_CHARGE_PORT, commandTarget);
requestData(TESLA_CHARGE_STATE);
}
public void lockDoors(boolean b) {
if (b) {
sendCommand(TESLA_COMMAND_DOOR_LOCK, commandTarget);
} else {
sendCommand(TESLA_COMMAND_DOOR_UNLOCK, commandTarget);
}
requestData(TESLA_VEHICLE_STATE);
}
public void autoConditioning(boolean b) {
if (b) {
sendCommand(TESLA_COMMAND_AUTO_COND_START, commandTarget);
} else {
sendCommand(TESLA_COMMAND_AUTO_COND_STOP, commandTarget);
}
requestData(TESLA_CLIMATE_STATE);
}
public void wakeUp() {
sendCommand(TESLA_COMMAND_WAKE_UP);
}
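// Fetches the list of vehicles from the Owner API and returns the one whose VIN matches the
// thing configuration, or null if it cannot be found.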
protected Vehicle queryVehicle() {
// get a list of vehicles
Response response = vehiclesTarget.request(MediaType.APPLICATION_JSON_TYPE)
.header("Authorization", "Bearer " + logonToken.access_token).get();
logger.debug("Querying the vehicle : Response : {}:{}", response.getStatus(), response.getStatusInfo());
JsonParser parser = new JsonParser();
JsonObject jsonObject = parser.parse(response.readEntity(String.class)).getAsJsonObject();
Vehicle[] vehicleArray = gson.fromJson(jsonObject.getAsJsonArray("response"), Vehicle[].class);
for (int i = 0; i < vehicleArray.length; i++) {
logger.debug("Querying the vehicle : VIN : {}", vehicleArray[i].vin);
if (vehicleArray[i].vin.equals(getConfig().get(VIN))) {
vehicleJSON = gson.toJson(vehicleArray[i]);
parseAndUpdate("queryVehicle", null, vehicleJSON);
return vehicleArray[i];
}
}
return null;
}
private String getStorageKey() {
return this.getThing().getUID().getId();
}
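// Obtains an access token: logs in with username/password when the stored token has expired,
// otherwise exchanges the stored refresh token for a new one.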
private ThingStatusDetail authenticate() {
Storage<Object> storage = storageService.getStorage(TeslaBindingConstants.BINDING_ID);
String storedToken = (String) storage.get(getStorageKey());
TokenResponse token = storedToken == null ? null : gson.fromJson(storedToken, TokenResponse.class);
SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
boolean hasExpired = true;
if (token != null) {
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(token.created_at * 1000);
logger.info("Found a request token created at {}", dateFormatter.format(calendar.getTime()));
calendar.setTimeInMillis(token.created_at * 1000 + 60 * token.expires_in);
Date now = new Date();
if (calendar.getTime().before(now)) {
logger.info("The token has expired at {}", dateFormatter.format(calendar.getTime()));
hasExpired = true;
} else {
hasExpired = false;
}
}
String username = (String) getConfig().get(USERNAME);
if (!StringUtils.isEmpty(username) && hasExpired) {
String password = (String) getConfig().get(PASSWORD);
return authenticate(username, password);
}
if (token == null || StringUtils.isEmpty(token.refresh_token)) {
return ThingStatusDetail.CONFIGURATION_ERROR;
}
TokenRequestRefreshToken tokenRequest = null;
try {
tokenRequest = new TokenRequestRefreshToken(token.refresh_token);
} catch (GeneralSecurityException e) {
logger.error("An exception occurred while requesting a new token : '{}'", e.getMessage(), e);
}
String payLoad = gson.toJson(tokenRequest);
Response response = tokenTarget.request().post(Entity.entity(payLoad, MediaType.APPLICATION_JSON_TYPE));
if (response == null) {
logger.debug("Authenticating : Response was null");
} else {
logger.debug("Authenticating : Response : {}:{}", response.getStatus(), response.getStatusInfo());
if (response.getStatus() == 200 && response.hasEntity()) {
String responsePayLoad = response.readEntity(String.class);
TokenResponse tokenResponse = gson.fromJson(responsePayLoad.trim(), TokenResponse.class);
if (tokenResponse != null && !StringUtils.isEmpty(tokenResponse.access_token)) {
storage.put(getStorageKey(), gson.toJson(tokenResponse));
this.logonToken = tokenResponse;
return ThingStatusDetail.NONE;
}
return ThingStatusDetail.NONE;
} else if (response.getStatus() == 401) {
if (!StringUtils.isEmpty(username)) {
String password = (String) getConfig().get(PASSWORD);
return authenticate(username, password);
} else {
return ThingStatusDetail.CONFIGURATION_ERROR;
}
} else if (response.getStatus() == 503 || response.getStatus() == 502) {
return ThingStatusDetail.COMMUNICATION_ERROR;
}
}
return ThingStatusDetail.CONFIGURATION_ERROR;
}
private ThingStatusDetail authenticate(String username, String password) {
TokenRequest token = null;
try {
token = new TokenRequestPassword(username, password);
} catch (GeneralSecurityException e) {
logger.error("An exception occurred while building a password request token : '{}'", e.getMessage(), e);
}
if (token != null) {
String payLoad = gson.toJson(token);
Response response = tokenTarget.request().post(Entity.entity(payLoad, MediaType.APPLICATION_JSON_TYPE));
if (response != null) {
logger.debug("Authenticating : Response : {}:{}", response.getStatus(), response.getStatusInfo());
if (response.getStatus() == 200 && response.hasEntity()) {
String responsePayLoad = response.readEntity(String.class);
TokenResponse tokenResponse = gson.fromJson(responsePayLoad.trim(), TokenResponse.class);
if (StringUtils.isNotEmpty(tokenResponse.access_token)) {
Storage<Object> storage = storageService.getStorage(TeslaBindingConstants.BINDING_ID);
storage.put(getStorageKey(), gson.toJson(tokenResponse));
this.logonToken = tokenResponse;
return ThingStatusDetail.NONE;
}
} else if (response.getStatus() == 401) {
return ThingStatusDetail.CONFIGURATION_ERROR;
} else if (response.getStatus() == 503 || response.getStatus() == 502) {
return ThingStatusDetail.COMMUNICATION_ERROR;
}
}
}
return ThingStatusDetail.CONFIGURATION_ERROR;
}
protected Runnable fastStateRunnable = () -> {
if (getThing().getStatus() == ThingStatus.ONLINE) {
if (isAwake()) {
requestData(TESLA_DRIVE_STATE);
requestData(TESLA_VEHICLE_STATE);
} else {
if (vehicle != null && allowWakeUp) {
wakeUp();
} else {
vehicle = queryVehicle();
}
}
}
if (allowWakeUp) {
updateState(CHANNEL_ALLOWWAKEUP, OnOffType.ON);
} else {
updateState(CHANNEL_ALLOWWAKEUP, OnOffType.OFF);
}
if (eventThread != null) {
updateState(CHANNEL_ENABLEEVENTS, OnOffType.ON);
} else {
updateState(CHANNEL_ENABLEEVENTS, OnOffType.OFF);
}
};
protected Runnable slowStateRunnable = () -> {
if (getThing().getStatus() == ThingStatus.ONLINE) {
if (isAwake()) {
requestData(TESLA_CHARGE_STATE);
requestData(TESLA_CLIMATE_STATE);
requestData(TESLA_GUI_STATE);
queryVehicle(TESLA_MOBILE_ENABLED_STATE);
parseAndUpdate("queryVehicle", null, vehicleJSON);
} else {
if (vehicle != null && allowWakeUp) {
wakeUp();
} else {
vehicle = queryVehicle();
}
}
}
};
protected Runnable connectRunnable = () -> {
try {
lock.lock();
if (getThing().getStatus() != ThingStatus.ONLINE) {
logger.debug("Setting up an authenticated connection to the Tesla back-end");
ThingStatusDetail authenticationResult = authenticate();
if (authenticationResult != ThingStatusDetail.NONE) {
updateStatus(ThingStatus.OFFLINE, authenticationResult);
} else {
// get a list of vehicles
Response response = vehiclesTarget.request(MediaType.APPLICATION_JSON_TYPE)
.header("Authorization", "Bearer " + logonToken.access_token).get();
if (response != null && response.getStatus() == 200 && response.hasEntity()) {
if ((vehicle = queryVehicle()) != null) {
logger.debug("Found the vehicle with VIN '{}' in the list of vehicles you own",
getConfig().get(VIN));
updateStatus(ThingStatus.ONLINE);
intervalErrors = 0;
intervalTimestamp = System.currentTimeMillis();
} else {
logger.warn("Unable to find the vehicle with VIN '{}' in the list of vehicles you own",
getConfig().get(VIN));
updateStatus(ThingStatus.OFFLINE);
}
} else {
if (response != null) {
logger.error("Error fetching the list of vehicles : {}:{}", response.getStatus(),
response.getStatusInfo());
updateStatus(ThingStatus.OFFLINE);
}
}
}
}
} catch (Exception e) {
logger.error("An exception occurred while connecting to the Tesla back-end: '{}'", e.getMessage());
} finally {
lock.unlock();
}
};
protected Runnable eventRunnable = new Runnable() {
Response eventResponse;
BufferedReader eventBufferedReader;
InputStreamReader eventInputStreamReader;
boolean isEstablished = false;
protected boolean establishEventStream() {
try {
if (!isEstablished) {
eventBufferedReader = null;
eventClient = ClientBuilder.newClient()
.property(ClientProperties.CONNECT_TIMEOUT, EVENT_STREAM_CONNECT_TIMEOUT)
.property(ClientProperties.READ_TIMEOUT, EVENT_STREAM_READ_TIMEOUT)
.register(new Authenticator((String) getConfig().get(USERNAME), vehicle.tokens[0]));
eventTarget = eventClient.target(TESLA_EVENT_URI).path(vehicle.vehicle_id + "/").queryParam(
"values", StringUtils.join(EventKeys.values(), ',', 1, EventKeys.values().length));
eventResponse = eventTarget.request(MediaType.TEXT_PLAIN_TYPE).get();
logger.debug("Event Stream : Establishing the event stream : Response : {}:{}",
eventResponse.getStatus(), eventResponse.getStatusInfo());
if (eventResponse.getStatus() == 200) {
InputStream dummy = (InputStream) eventResponse.getEntity();
eventInputStreamReader = new InputStreamReader(dummy);
eventBufferedReader = new BufferedReader(eventInputStreamReader);
isEstablished = true;
} else if (eventResponse.getStatus() == 401) {
isEstablished = false;
} else {
isEstablished = false;
}
}
} catch (Exception e) {
logger.error(
"Event Stream : An exception occurred while establishing the event stream for the vehicle: '{}'",
e.getMessage());
isEstablished = false;
}
return isEstablished;
}
@Override
public void run() {
while (true) {
try {
if (getThing().getStatus() == ThingStatus.ONLINE) {
if (isAwake()) {
if (establishEventStream()) {
String line = eventBufferedReader.readLine();
while (line != null) {
logger.debug("Event Stream : Received an event: '{}'", line);
String[] vals = line.split(",");
long currentTimeStamp = Long.valueOf(vals[0]);
long systemTimeStamp = System.currentTimeMillis();
if (logger.isDebugEnabled()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.debug("STS {} CTS {} Delta {}",
dateFormatter.format(new Date(systemTimeStamp)),
dateFormatter.format(new Date(currentTimeStamp)),
systemTimeStamp - currentTimeStamp);
}
if (systemTimeStamp - currentTimeStamp < EVENT_TIMESTAMP_AGE_LIMIT) {
if (currentTimeStamp > lastTimeStamp) {
lastTimeStamp = Long.valueOf(vals[0]);
if (logger.isDebugEnabled()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.debug("Event Stream : Event stamp is {}",
dateFormatter.format(new Date(lastTimeStamp)));
}
for (int i = 0; i < EventKeys.values().length; i++) {
TeslaChannelSelector selector = TeslaChannelSelector
.getValueSelectorFromRESTID((EventKeys.values()[i]).toString());
if (!selector.isProperty()) {
State newState = teslaChannelSelectorProxy.getState(vals[i],
selector, editProperties());
if (newState != null && !"".equals(vals[i])) {
updateState(selector.getChannelID(), newState);
} else {
updateState(selector.getChannelID(), UnDefType.UNDEF);
}
} else {
Map<String, String> properties = editProperties();
properties.put(selector.getChannelID(),
(selector.getState(vals[i])).toString());
updateProperties(properties);
}
}
} else {
if (logger.isDebugEnabled()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.debug(
"Event Stream : Discarding an event with an out of sync timestamp {} (last is {})",
dateFormatter.format(new Date(currentTimeStamp)),
dateFormatter.format(new Date(lastTimeStamp)));
}
}
} else {
if (logger.isDebugEnabled()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSS");
logger.debug(
"Event Stream : Discarding an event that differs {} ms from the system time: {} (system is {})",
systemTimeStamp - currentTimeStamp,
dateFormatter.format(currentTimeStamp),
dateFormatter.format(systemTimeStamp));
}
if (systemTimeStamp - currentTimeStamp > EVENT_TIMESTAMP_MAX_DELTA) {
if (logger.isTraceEnabled()) {
logger.trace("Event Stream : The event stream will be reset");
}
isEstablished = false;
}
}
line = eventBufferedReader.readLine();
}
if (line == null) {
if (logger.isTraceEnabled()) {
logger.trace("Event Stream : The end of stream was reached");
}
isEstablished = false;
}
}
} else {
logger.debug("Event stream : The vehicle is not awake");
if (vehicle != null && allowWakeUp) {
// wake up the vehicle until the streaming token is non-zero
logger.debug("Event stream : Waking up the vehicle");
wakeUp();
} else {
logger.debug("Event stream : Querying the vehicle");
vehicle = queryVehicle();
}
}
} else {
Thread.sleep(250);
}
} catch (IOException | NumberFormatException e) {
if (logger.isErrorEnabled()) {
logger.error("Event Stream : An exception occurred while reading events : '{}'",
e.getMessage());
}
isEstablished = false;
} catch (InterruptedException e) {
isEstablished = false;
}
if (Thread.interrupted()) {
logger.debug("Event Stream : the Event Stream was interrupted");
return;
}
}
}
};
protected class Request implements Runnable {
private String request;
private String payLoad;
private WebTarget target;
public Request(String request, String payLoad, WebTarget target) {
this.request = request;
this.payLoad = payLoad;
this.target = target;
}
@Override
public void run() {
try {
String result = "";
if (isAwake() && getThing().getStatus() == ThingStatus.ONLINE) {
result = invokeAndParse(request, payLoad, target);
}
if (result != null && !"".equals(result)) {
parseAndUpdate(request, payLoad, result);
}
} catch (Exception e) {
logger.error("An exception occurred while executing a request to the vehicle: '{}'", e.getMessage());
}
}
}
protected class Authenticator implements ClientRequestFilter {
private final String user;
private final String password;
public Authenticator(String user, String password) {
this.user = user;
this.password = password;
}
@Override
public void filter(ClientRequestContext requestContext) throws IOException {
MultivaluedMap<String, Object> headers = requestContext.getHeaders();
final String basicAuthentication = getBasicAuthentication();
headers.add("Authorization", basicAuthentication);
}
private String getBasicAuthentication() {
String token = this.user + ":" + this.password;
try {
return "Basic " + DatatypeConverter.printBase64Binary(token.getBytes("UTF-8"));
} catch (UnsupportedEncodingException ex) {
throw new IllegalStateException("Cannot encode with UTF-8", ex);
}
}
}
}
| [Tesla] Fix change wrt Tesla’s back-end handling Event Stream errors (#3531)
Signed-off-by: Karel Goderis <[email protected]> | addons/binding/org.openhab.binding.tesla/src/main/java/org/openhab/binding/tesla/handler/TeslaHandler.java | [Tesla] Fix change wrt Tesla’s back-end handling Event Stream errors (#3531) |
|
Java | agpl-3.0 | d8a8f60cca1a3fda9294e7cf95a7781dea7aa618 | 0 | exomiser/Exomiser,exomiser/Exomiser | /*
* The Exomiser - A tool to annotate and prioritize variants
*
* Copyright (C) 2012 - 2016 Charite Universitätsmedizin Berlin and Genome Research Ltd.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.charite.compbio.exomiser.rest.prioritiser.api;
import de.charite.compbio.exomiser.core.factories.GeneFactory;
import de.charite.compbio.exomiser.core.model.Gene;
import de.charite.compbio.exomiser.core.prioritisers.*;
import de.charite.compbio.jannovar.data.JannovarData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.time.Duration;
import java.time.Instant;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
/**
* @author Jules Jacobsen <[email protected]>
*/
@RestController
public class PrioritiserController {
private static final Logger logger = LoggerFactory.getLogger(PrioritiserController.class);
private final PriorityFactory priorityFactory;
private final Map<String, String> geneIdentifiers;
@Autowired
public PrioritiserController(PriorityFactory priorityFactory, JannovarData jannovarData) {
this.priorityFactory = priorityFactory;
this.geneIdentifiers = GeneFactory.createKnownGeneIdentifiers(jannovarData);
logger.info("Created GeneIdentifier cache with {} entries", geneIdentifiers.size());
}
@GetMapping(value = "info")
public String info() {
return "This service will return a collection of prioritiser results for any given set of:" +
"\n\t - HPO identifiers e.g. HPO:00001" +
"\n\t - Entrez gene identifiers e.g. 23364" +
"\n\t - Specified prioritiser e.g. hiphive along with any prioritiser specific commands e.g. human,mouse,fish,ppi";
}
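/**
 * Main REST endpoint: runs the requested prioritiser over the supplied HPO terms and gene
 * identifiers (or over all known genes when none are given) and returns the ranked results
 * together with the request parameters and the run duration.
 */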
@GetMapping(value = "", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
public PrioritiserResultSet prioritise(@RequestParam(value = "phenotypes") List<String> phenotypes,
@RequestParam(value = "genes", required = false, defaultValue = "") List<Integer> genesIds,
@RequestParam(value = "prioritiser") String prioritiserName,
@RequestParam(value = "prioritiser-params", required = false, defaultValue = "") String prioritiserParams,
@RequestParam(value = "limit", required = false, defaultValue = "0") Integer limit
) {
logger.info("phenotypes: {}({}) genes: {} prioritiser: {} prioritiser-params: {}", phenotypes, phenotypes.size(), genesIds, prioritiserName, prioritiserParams);
PriorityType priorityType = parsePrioritiserType(prioritiserName.trim());
Prioritiser prioritiser = setUpPrioritiser(phenotypes, prioritiserParams, priorityType);
//this is a slow step - GeneIdentifiers should be used instead of genes. GeneIdentifiers can be cached.
Instant start = Instant.now();
List<Gene> genes = parseGeneIdentifiers(genesIds);
prioritiser.prioritizeGenes(genes);
//in an ideal world this would return Stream<PriorityResult> results = prioritiser.prioritise(hpoIds, geneIds)
//and the next section would be mostly superfluous
List<PriorityResult> results = getPrioritiserResults(limit, priorityType, genes);
Instant end = Instant.now();
Duration duration = Duration.between(start, end);
Map<String, String> params = new LinkedHashMap<>();
params.put("phenotypes", phenotypes.toString());
params.put("genes", genesIds.toString());
params.put("prioritiser", prioritiserName);
params.put("prioritiser-params", prioritiserParams);
params.put("limit", limit.toString());
return new PrioritiserResultSet(params, duration.toMillis(), results);
}
Prioritiser setUpPrioritiser(List<String> phenotypes, String prioritiserParams, PriorityType priorityType) {
List<String> uniquePhenotypes = phenotypes.stream().distinct().collect(toList());
PrioritiserSettings prioritiserSettings = new PrioritiserSettingsImpl.PrioritiserSettingsBuilder().hpoIdList(uniquePhenotypes).exomiser2Params(prioritiserParams).build();
return priorityFactory.makePrioritiser(priorityType, prioritiserSettings);
}
private PriorityType parsePrioritiserType(String prioritiserName) {
switch(prioritiserName) {
case "phenix":
return PriorityType.PHENIX_PRIORITY;
case "phive":
return PriorityType.PHIVE_PRIORITY;
case "hiphive":
default:
return PriorityType.HIPHIVE_PRIORITY;
}
}
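// Resolves the requested Entrez gene ids to Gene objects; an empty list means every known
// gene is used.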
private List<Gene> parseGeneIdentifiers(List<Integer> genesIds) {
if (genesIds.isEmpty()) {
logger.info("Gene identifiers not specified - will compare against all known genes.");
//If not specified, we'll assume they want to use the whole genome. Should save people a lot of typing.
//n.b. Gene is mutable so these can't be cached and returned.
return geneIdentifiers.entrySet().parallelStream()
//geneId and geneSymbol are the same in cases where no Entrez id is known for the gene
.map(entry -> {
String geneId = entry.getKey();
String geneSymbol = entry.getValue();
if (geneId.equals(geneSymbol)) {
return new Gene(geneSymbol, -1);
}
//we're assuming Entrez ids here.
return new Gene(geneSymbol, Integer.parseInt(geneId));
})
.collect(toList());
}
//this is a hack - really the Prioritiser should only work on GeneIds, but currently this isn't possible as OmimPrioritiser uses some properties of Gene
return genesIds.stream()
.map(id -> new Gene(geneIdentifiers.getOrDefault(Integer.toString(id), "GENE:" + id), id))
.collect(toList());
}
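// Extracts and sorts each gene's PriorityResult; a limit of 0 returns all results.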
List<PriorityResult> getPrioritiserResults(int limit, PriorityType priorityType, List<Gene> genes) {
Stream<PriorityResult> sortedPriorityResultsStream = genes.stream()
.map(gene -> gene.getPriorityResult(priorityType))
.sorted(Comparator.naturalOrder());
if (limit == 0) {
return sortedPriorityResultsStream.collect(toList());
}
return sortedPriorityResultsStream.limit(limit).collect(toList());
}
}
| exomiser-rest-prioritiser/src/main/java/de/charite/compbio/exomiser/rest/prioritiser/api/PrioritiserController.java | /*
* The Exomiser - A tool to annotate and prioritize variants
*
* Copyright (C) 2012 - 2016 Charite Universitätsmedizin Berlin and Genome Research Ltd.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.charite.compbio.exomiser.rest.prioritiser.api;
import de.charite.compbio.exomiser.core.factories.GeneFactory;
import de.charite.compbio.exomiser.core.model.Gene;
import de.charite.compbio.exomiser.core.prioritisers.*;
import de.charite.compbio.jannovar.data.JannovarData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.time.Duration;
import java.time.Instant;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
/**
* @author Jules Jacobsen <[email protected]>
*/
@RestController
public class PrioritiserController {
private static final Logger logger = LoggerFactory.getLogger(PrioritiserController.class);
private final PriorityFactory priorityFactory;
private final Map<String, String> geneIdentifiers;
@Autowired
public PrioritiserController(PriorityFactory priorityFactory, JannovarData jannovarData) {
this.priorityFactory = priorityFactory;
this.geneIdentifiers = GeneFactory.createKnownGeneIdentifiers(jannovarData);
logger.info("Created GeneIdentifier cache with {} entries", geneIdentifiers.size());
}
@GetMapping(value = "info")
public String info() {
return "This service will return a collection of prioritiser results for any given set of:" +
"\n\t - HPO identifiers e.g. HPO:00001" +
"\n\t - Entrez gene identifiers e.g. 23364" +
"\n\t - Specified prioritiser e.g. hiphive along with any prioritiser specific commands e.g. human,mouse,fish,ppi";
}
@GetMapping(value = "", produces = MediaType.APPLICATION_JSON_UTF8_VALUE)
public PrioritiserResultSet prioritise(@RequestParam(value = "phenotypes") List<String> phenotypes,
@RequestParam(value = "genes", required = false, defaultValue = "") List<Integer> genesIds,
@RequestParam(value = "prioritiser") String prioritiserName,
@RequestParam(value = "prioritiser-params", required = false, defaultValue = "") String prioritiserParams,
@RequestParam(value = "limit", required = false, defaultValue = "0") Integer limit
) {
logger.info("phenotypes: {}({}) genes: {} prioritiser: {} prioritiser-params: {}", phenotypes, phenotypes.size(), genesIds, prioritiserName, prioritiserParams);
PriorityType priorityType = parsePrioritiserType(prioritiserName.trim());
Prioritiser prioritiser = setUpPrioritiser(phenotypes, prioritiserParams, priorityType);
//this is a slow step - GeneIdentifiers should be used instead of genes. GeneIdentifiers can be cached.
Instant start = Instant.now();
List<Gene> genes = parseGeneIdentifiers(genesIds);
prioritiser.prioritizeGenes(genes);
//in an ideal world this would return Stream<PriorityResult> results = prioritiser.prioritise(hpoIds, geneIds)
//and the next section would be mostly superfluous
List<PriorityResult> results = getPrioritiserResults(limit, priorityType, genes);
Instant end = Instant.now();
Duration duration = Duration.between(start, end);
Map<String, String> params = new LinkedHashMap<>();
params.put("phenotypes", phenotypes.toString());
params.put("genes", genesIds.toString());
params.put("prioritiser", prioritiserName);
params.put("prioritiser-params", prioritiserParams);
params.put("limit", limit.toString());
return new PrioritiserResultSet(params, duration.toMillis(), results);
}
Prioritiser setUpPrioritiser(List<String> phenotypes, String prioritiserParams, PriorityType priorityType) {
PrioritiserSettings prioritiserSettings = new PrioritiserSettingsImpl.PrioritiserSettingsBuilder().hpoIdList(phenotypes).exomiser2Params(prioritiserParams).build();
return priorityFactory.makePrioritiser(priorityType, prioritiserSettings);
}
private PriorityType parsePrioritiserType(String prioritiserName) {
switch(prioritiserName) {
case "phenix":
return PriorityType.PHENIX_PRIORITY;
case "phive":
return PriorityType.PHIVE_PRIORITY;
case "hiphive":
default:
return PriorityType.HIPHIVE_PRIORITY;
}
}
private List<Gene> parseGeneIdentifiers(List<Integer> genesIds) {
if (genesIds.isEmpty()) {
logger.info("Gene identifiers not specified - will compare against all known genes.");
//If not specified, we'll assume they want to use the whole genome. Should save people a lot of typing.
//n.b. Gene is mutable so these can't be cached and returned.
return geneIdentifiers.entrySet().parallelStream()
//geneId and geneSymbol are the same in cases where no Entrez id is known for the gene
.map(entry -> {
String geneId = entry.getKey();
String geneSymbol = entry.getValue();
if (geneId.equals(geneSymbol)) {
return new Gene(geneSymbol, -1);
}
//we're assuming Entrez ids here.
return new Gene(geneSymbol, Integer.parseInt(geneId));
})
.collect(toList());
}
//this is a hack - really the Prioritiser should only work on GeneIds, but currently this isn't possible as OmimPrioritiser uses some properties of Gene
return genesIds.stream()
.map(id -> new Gene(geneIdentifiers.getOrDefault(Integer.toString(id), "GENE:" + id), id))
.collect(toList());
}
List<PriorityResult> getPrioritiserResults(int limit, PriorityType priorityType, List<Gene> genes) {
Stream<PriorityResult> sortedPriorityResultsStream = genes.stream()
.map(gene -> gene.getPriorityResult(priorityType))
.sorted(Comparator.naturalOrder());
if (limit == 0) {
return sortedPriorityResultsStream.collect(toList());
}
return sortedPriorityResultsStream.limit(limit).collect(toList());
}
}
| Removed duplicated terms from input phenotypes.
| exomiser-rest-prioritiser/src/main/java/de/charite/compbio/exomiser/rest/prioritiser/api/PrioritiserController.java | Removed duplicated terms from input phenotypes. |
|
Java | agpl-3.0 | 04fb48485b76be2bf65b255600e167752ca17ec1 | 0 | Stanwar/agreementmaker,sabarish14/agreementmaker,Stanwar/agreementmaker,sabarish14/agreementmaker,Stanwar/agreementmaker,sabarish14/agreementmaker,Stanwar/agreementmaker,sabarish14/agreementmaker | package iterativeinstancestructuralmatcher.internal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import am.AMException;
import am.Utility;
import am.app.Core;
import am.app.mappingEngine.AbstractMatcher;
import am.app.mappingEngine.AbstractMatcherParametersPanel;
import am.app.mappingEngine.Alignment;
import am.app.mappingEngine.Mapping;
import am.app.mappingEngine.MatcherFactory;
import am.app.mappingEngine.MatchersRegistry;
import am.app.mappingEngine.referenceAlignment.ReferenceAlignmentMatcher;
import am.app.mappingEngine.referenceAlignment.ReferenceEvaluationData;
import am.app.mappingEngine.referenceAlignment.ReferenceEvaluator;
import am.app.mappingEngine.similarityMatrix.ArraySimilarityMatrix;
import am.app.ontology.Node;
import am.app.ontology.Ontology;
import am.userInterface.MatcherParametersDialog;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.ontology.AllValuesFromRestriction;
import com.hp.hpl.jena.ontology.CardinalityRestriction;
import com.hp.hpl.jena.ontology.Individual;
import com.hp.hpl.jena.ontology.MaxCardinalityRestriction;
import com.hp.hpl.jena.ontology.MinCardinalityRestriction;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntProperty;
import com.hp.hpl.jena.ontology.OntResource;
import com.hp.hpl.jena.ontology.Restriction;
import com.hp.hpl.jena.ontology.UnionClass;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
public class IterativeInstanceStructuralMatcher extends AbstractMatcher {
private static final long serialVersionUID = 3612931342445940115L;
double PROPERTY_THRESHOLD = 0.8;
double CLASS_THRESHOLD = 0.8;
boolean individuals = true;
boolean matchUnionClasses = true;
boolean printMappingTable = false;
static boolean verbose = false;
private List<Node> sourceClassList;
private List<Node> targetClassList;
private List<Node> sourcePropList;
private List<Node> targetPropList;
private transient HashMap<Node, List<Restriction>> sourceRestrictions;
private transient HashMap<Node, List<Restriction>> targetRestrictions;
private transient HashMap<Restriction, Node> restrictions;
private transient HashMap<OntProperty, List<String>> sourcePropValues;
private transient HashMap<OntProperty, List<String>> targetPropValues;
private PropertySimilarity[][] propSimilarities;
private ClassSimilarity[][] classSimilarities;
//Provenance Strings
private final String RECURSIVE_INDIVIDUALS = "Recursive Individuals";
private final String SUBPROPERTIES = "Subproperties";
private final String SUBCLASSES = "Subclasses";
private final String COMBINATION = "Combination";
private final String PROPERTY_VALUES = "Property Values";
private final String PROPERTY_USAGE = "Property Usage";
private final String UNION_CLASSES = "Union Classes";
private final String SUBCLASSOF = "SubclassOf";
private final String RANGE_DOMAIN = "Range Domain";
private final String SYNTACTIC = "Syntactic";
IterativeInstanceStructuralParameters parameters;
public IterativeInstanceStructuralMatcher(){
super();
}
@Override
protected void initializeVariables() {
super.initializeVariables();
minInputMatchers = 0;
maxInputMatchers = 1;
needsParam = true;
setName("Iterative Instance and Structural Matcher");
setCategory(MatcherCategory.HYBRID);
}
@Override
public void matchEnd() {
// TODO Auto-generated method stub
super.matchEnd();
if(printMappingTable)
evaluate();
}
// @SuppressWarnings("unchecked")
@Override
protected void align() throws Exception {
if (sourceOntology == null || targetOntology == null)
return; // cannot align just one ontology
if(param!=null)
parameters = (IterativeInstanceStructuralParameters)param;
sourceClassList = sourceOntology.getClassesList();
targetClassList = targetOntology.getClassesList();
sourcePropList = sourceOntology.getPropertiesList();
targetPropList = targetOntology.getPropertiesList();
//Initialize maps for information about restrictions
initHashMaps();
receiveInputMatrices();
initSimilarityMatrices();
if(individuals){
sourcePropValues = initPropValues(sourcePropList,sourceOntology);
targetPropValues = initPropValues(targetPropList,targetOntology);
}
printPropValues();
if(individuals){
//Match properties by similar values
if(parameters.usePropertyUsage)
matchPropertyValues();
Node source;
Node target;
for (int i = 0; i < sourceClassList.size(); i++) {
source = sourceClassList.get(i);
for (int j = 0; j < targetClassList.size(); j++) {
target = targetClassList.get(j);
if(matchIndividuals(source,target)){
Mapping m = new Mapping(source, target, 1.0);
m.setProvenance(RECURSIVE_INDIVIDUALS);
classesMatrix.set(i, j, m);
}
}
}
}
//Iterative part
for (int i = 0; ; i++) {
double totAlign = getNumberOfClassAlignments() + getNumberOfPropAlignments();
//Match by superclasses and restriction on properties
if(parameters.useSuperclasses)
matchSuperclasses();
//Match properties by range and domain
if(parameters.useRangeDomain)
matchRangeAndDomain();
//Match properties by their presence in restrictions
if(parameters.useRangeDomain)
matchPropertyUsage();
//match sons of aligned classes
matchSubClasses();
//match sons of aligned properties
matchSubProperties();
findNewAlignments();
double totAlign2 = getNumberOfClassAlignments() + getNumberOfPropAlignments();
if(totAlign2==totAlign){
if( Core.DEBUG_FCM ) System.out.println("CONVERGED IN "+(i+1)+" ITERATIONS");
break;
}
}
if( matchUnionClasses ) matchUnionClasses();
filterNonOntologyAlignments();
//printAllSimilarities();
//evaluate();
}
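//Promotes any combined class or property similarity that is higher than the value currently stored in the corresponding matrix.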
private void findNewAlignments() {
double sim;
for (int i = 0; i < classSimilarities.length; i++) {
for (int j = 0; j < classSimilarities[0].length; j++) {
sim = classSimilarities[i][j].getSimilarity();
if(sim > classesMatrix.getSimilarity(i, j)){
Mapping m = new Mapping( sourceClassList.get(i), targetClassList.get(j), sim );
m.setProvenance(COMBINATION);
classesMatrix.set(i, j, m);
}
}
}
for (int i = 0; i < propSimilarities.length; i++) {
for (int j = 0; j < propSimilarities[0].length; j++) {
//System.out.print(sourcePropList.get(i)+" "+targetPropList.get(j));
sim = propSimilarities[i][j].getSimilarity();
if(sim > propertiesMatrix.getSimilarity(i, j)){
Mapping m = new Mapping( sourcePropList.get(i), targetPropList.get(j), sim );
m.setProvenance(COMBINATION);
propertiesMatrix.set(i, j, m);
}
}
}
}
private void initSimilarityMatrices() {
classSimilarities = new ClassSimilarity[sourceClassList.size()][targetClassList.size()];
propSimilarities = new PropertySimilarity[sourcePropList.size()][targetPropList.size()];
for (int i = 0; i < sourceClassList.size(); i++) {
for (int j = 0; j < targetClassList.size(); j++) {
classSimilarities[i][j] = new ClassSimilarity();
classSimilarities[i][j].setSyntactic(classesMatrix.getSimilarity(i, j));
}
}
for (int i = 0; i < sourcePropList.size(); i++) {
for (int j = 0; j < targetPropList.size(); j++) {
propSimilarities[i][j] = new PropertySimilarity();
propSimilarities[i][j].setSyntactic(propertiesMatrix.getSimilarity(i, j));
}
}
}
private void receiveInputMatrices() {
if(inputMatchers.size()>0){
AbstractMatcher input = inputMatchers.get(0);
//classesMatrix = input.getClassesMatrix();
try {
classesMatrix = new ArraySimilarityMatrix(input.getClassesMatrix());
} catch( AMException e ) {
e.printStackTrace();
}
//propertiesMatrix = input.getPropertiesMatrix();
try {
propertiesMatrix = new ArraySimilarityMatrix(input.getPropertiesMatrix());
} catch( AMException e ) {
e.printStackTrace();
}
//System.out.println();
}
else{
classesMatrix = new ArraySimilarityMatrix(sourceOntology, targetOntology, alignType.aligningClasses);
propertiesMatrix = new ArraySimilarityMatrix(sourceOntology, targetOntology, alignType.aligningProperties);
}
}
private void filterNonOntologyAlignments() {
for (int i = 0; i < sourceClassList.size(); i++) {
if(!sourceClassList.get(i).getUri().startsWith(sourceOntology.getURI())){
for (int j = 0; j < targetClassList.size(); j++) {
classesMatrix.set(i, j, null);
}
}
}
for (int j = 0; j < targetClassList.size(); j++) {
if(!targetClassList.get(j).getUri().startsWith(targetOntology.getURI())){
for (int i = 0; i < sourceClassList.size(); i++) {
classesMatrix.set(i, j, null);
}
}
}
for (int i = 0; i < sourcePropList.size(); i++) {
if(!sourcePropList.get(i).getUri().startsWith(sourceOntology.getURI())){
for (int j = 0; j < targetPropList.size(); j++) {
propertiesMatrix.set(i, j, null);
}
}
}
for (int j = 0; j < targetPropList.size(); j++) {
if(!targetPropList.get(j).getUri().startsWith(targetOntology.getURI())){
for (int i = 0; i < sourcePropList.size(); i++) {
propertiesMatrix.set(i, j, null);
}
}
}
}
private void evaluate() {
ReferenceAlignmentMatcher refMatcher = (ReferenceAlignmentMatcher)MatcherFactory.getMatcherInstance(MatchersRegistry.ImportAlignment,0);
MatcherParametersDialog dialog = new MatcherParametersDialog(refMatcher,false,false);
if(dialog.parametersSet()) {
refMatcher.setParam(dialog.getParameters());
refMatcher.setThreshold(refMatcher.getDefaultThreshold());
refMatcher.setMaxSourceAlign(refMatcher.getDefaultMaxSourceRelations());
refMatcher.setMaxTargetAlign(refMatcher.getDefaultMaxTargetRelations());
try {
refMatcher.match();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
String report="Reference Evaluation Complete\n\n";
ReferenceEvaluationData rd = ReferenceEvaluator.compare(getAlignment(), refMatcher.getAlignment());
setRefEvaluation(rd);
System.out.println("CORRECT MAPPINGS");
System.out.println(allSimilarities(rd.getCorrectAlignments()));
System.out.println("WRONG MAPPINGS");
System.out.println(allSimilarities(rd.getErrorAlignments()));
System.out.println("MISSED MAPPINGS");
System.out.println(allSimilarities(rd.getLostAlignments()));
report+= getRegistryEntry().getMatcherName()+"\n\n";
report +=rd.getReport()+"\n";
Utility.displayTextAreaPane(report,"Reference Evaluation Report");
}
dialog.dispose();
Core.getUI().redisplayCanvas();
}
private void matchPropertyValues() {
if( Core.DEBUG_FCM ) System.out.println("MATCH PROPERTY VALUES");
OntProperty sProp;
OntProperty tProp;
for (int i = 0; i < sourcePropList.size() ; i++) {
sProp = (OntProperty)sourcePropList.get(i).getResource().as(OntProperty.class);
List<String> sList = sourcePropValues.get(sProp);
for (int j = 0; j < targetPropList.size(); j++) {
tProp = (OntProperty)targetPropList.get(j).getResource().as(OntProperty.class);
if(!sProp.getURI().startsWith(sourceOntology.getURI())||
!tProp.getURI().startsWith(targetOntology.getURI()))
continue;
List<String> tList = targetPropValues.get(tProp);
if(sList.size()==0 || tList.size()==0) continue;
if( Core.DEBUG_FCM ) System.out.println(sProp.getLocalName()+" "+tProp.getLocalName()+" litsize: "+sList.size()+" "+tList.size());
double sim = 0;
String l1;
String l2;
for (int k = 0; k < sList.size(); k++) {
l1 = sList.get(k);
for (int t = 0; t < tList.size(); t++) {
l2 = tList.get(t);
if(l1.equals(l2)){
sim++;
}
}
}
sim = sim / Math.max(sList.size(),tList.size());
propSimilarities[i][j].setValues(sim);
if(sim >= parameters.getPropertyValuesThreshold()){
Mapping m = new Mapping(sourcePropList.get(i), targetPropList.get(j), sim);
m.setProvenance(PROPERTY_VALUES);
if(parameters.boostPropertyValues) m.setSimilarity(1.0d);
propertiesMatrix.set(i, j, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sProp.getLocalName()+" "+tProp.getLocalName()+" BY PROP VALUES");
}
}
}
}
@SuppressWarnings("unchecked")
private HashMap<OntProperty, List<String>> initPropValues(List<Node> propList,Ontology ontology) {
HashMap<OntProperty, List<String>> propValues = new HashMap<OntProperty, List<String>>();
List<Statement> stmts;
List<String> literals;
for (int i = 0; i < propList.size(); i++) {
OntProperty sProp = (OntProperty)propList.get(i).getResource().as(OntProperty.class);
//System.out.println("Prop: "+sProp);
literals = new ArrayList<String>();
stmts = ontology.getModel().listStatements(null, sProp, (RDFNode)null).toList();
if( stmts.isEmpty() ) { stmts = ontology.getModel().listStatements(null, ontology.getModel().getProperty(sProp.getLocalName()) ,(RDFNode)null).toList(); }
for (int j = 0; j < stmts.size(); j++) {
Statement s = stmts.get(j);
//System.out.println(s);
RDFNode obj = s.getObject();
if(obj.isLiteral()){
Literal l = (Literal)obj;
if(!literals.contains(l.getString()))
literals.add(l.getString());
}
}
propValues.put(sProp, literals);
}
return propValues;
}
private void matchSubProperties() {
if(verbose)
if( Core.DEBUG_FCM ) System.out.println("MATCH SUBPROPERTIES");
ArrayList<OntProperty> sSub;
ArrayList<OntProperty> tSub;
for (int i = 0; i < sourcePropList.size(); i++) {
sSub = new ArrayList<OntProperty>();
OntProperty pr1 = (OntProperty)sourcePropList.get(i).getResource().as(OntProperty.class);
ExtendedIterator it1 = pr1.listSubProperties();
while(it1.hasNext()){
sSub.add((OntProperty)it1.next());
}
for (int j = 0; j < targetPropList.size(); j++){
tSub = new ArrayList<OntProperty>();
OntProperty pr2 = (OntProperty)targetPropList.get(j).getResource().as(OntProperty.class);
ExtendedIterator it2 = pr2.listSubProperties();
while(it2.hasNext()){
tSub.add((OntProperty)it2.next());
}
if(alignedProp(pr1.getURI(),pr2.getURI())>=PROPERTY_THRESHOLD &&
sSub.size()==tSub.size() && sSub.size()>0){
if(verbose){
if( Core.DEBUG_FCM ) System.out.println("size: "+sSub.size());
if( Core.DEBUG_FCM ) System.out.println("prop1: "+pr1.getLocalName());
if( Core.DEBUG_FCM ) System.out.println(sSub);
if( Core.DEBUG_FCM ) System.out.println("prop2: "+pr2.getLocalName());
if( Core.DEBUG_FCM ) System.out.println(tSub);
}
for (int k = 0; k < sSub.size(); k++) {
for (int t = 0; t < tSub.size(); t++) {
if(alignedProp(sSub.get(k).getURI(),tSub.get(t).getURI())>=PROPERTY_THRESHOLD){
sSub.remove(k);
tSub.remove(t);
k--;
t--;
break;
}
}
}
if(verbose){
System.out.println("Still to align: "+sSub.size());
}
if(sSub.size()==0) continue;
if(sSub.size()==1){
int row = getIndex(sourcePropList,sSub.get(0).getURI());
int col = getIndex(targetPropList,tSub.get(0).getURI());
Mapping m = new Mapping( sourcePropList.get(row), targetPropList.get(col), 1.0d);
m.setProvenance(SUBPROPERTIES);
propertiesMatrix.set( row, col, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sSub.get(0).getLocalName()+" "+
tSub.get(0).getLocalName()+" BY SUBPROPERTIES");
continue;
}
double[][] sims = new double[sSub.size()][sSub.size()];
for (int k = 0; k < sSub.size(); k++) {
for (int t = 0; t < tSub.size(); t++) {
sims[k][t] = rangeAndDomainSimilarity(sSub.get(k), tSub.get(t));
}
}
List<AlignIndexes> aligns = Utils.optimalAlignments(sims);
for (int k = 0; k < aligns.size(); k++) {
if( Core.DEBUG_FCM ) System.out.println(aligns.get(k).getX()+" "+aligns.get(k).getY());
int row = getIndex(sourcePropList,sSub.get(aligns.get(k).getX()).getURI());
int col = getIndex(targetPropList,tSub.get(aligns.get(k).getY()).getURI());
Mapping m = new Mapping( sourcePropList.get(row), targetPropList.get(col), 1.0d);
m.setProvenance(SUBPROPERTIES);
propertiesMatrix.set( row, col, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sSub.get(aligns.get(k).getX()).getLocalName()+" "
+tSub.get(aligns.get(k).getY()).getLocalName()+ " BY SUBPROPERTIES");
}
if(verbose){
Utils.printMatrix(sims);
}
}
}
}
return;
}
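//Scores two properties by their declared ranges and domains; returns 0 when only one side declares a range/domain, or when exactly one of the two is a datatype property.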
private double rangeAndDomainSimilarity(OntProperty sProp,
OntProperty tProp) {
if((sProp.getDomain()==null && tProp.getDomain()!=null) ||
(sProp.getDomain()!=null && tProp.getDomain()==null))
return 0.0;
if((sProp.getRange()==null && tProp.getRange()!=null) ||
(sProp.getRange()!=null && tProp.getRange()==null))
return 0.0;
if(sProp.isDatatypeProperty() && !tProp.isDatatypeProperty() ||
!sProp.isDatatypeProperty() && tProp.isDatatypeProperty())
return 0.0;
if(verbose){
System.out.println(sProp.getLocalName()+","+tProp.getLocalName());
}
double rangeSim = 0;
double domainSim = 0;
boolean unions = false;
if(sProp.getDomain()==null && tProp.getDomain()==null){
domainSim = 0.8;
}
else{
try {
domainSim = domainSimilarity(sProp.getDomain(),tProp.getDomain());
if(sProp.getDomain().asClass().isUnionClass() &&
tProp.getDomain().asClass().isUnionClass())
unions = true;
} catch( Exception e ) {
e.printStackTrace();
domainSim = 0.8;
}
}
if(sProp.getRange()!=null && tProp.getRange()!=null)
rangeSim = compareResources(sProp.getRange(), tProp.getRange());
if(sProp.getRange()==null && tProp.getRange()==null)
rangeSim = 0.8;
if(!unions && tProp.getRange()!=null && tProp.getRange().getURI() != null &&
Utils.primitiveType(tProp.getRange().getURI()))
rangeSim *= 0.75;
/*
else if(sProp.isObjectProperty() && tProp.isObjectProperty()){
domainSim = alignedClass(sProp.getRange().getURI(), tProp.getRange().getURI());
}*/
if(verbose){
System.out.println("rangesim: "+rangeSim+" domsim: "+domainSim);
}
double sim = (rangeSim+domainSim)/2;
return sim;
}
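//For aligned class pairs with the same number of subclasses, removes already-aligned children and maps the remaining pair when exactly one candidate is left on each side.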
private void matchSubClasses() {
if(verbose){
System.out.println("MATCH SUBCLASSES");
}
ArrayList<OntClass> sSub;
ArrayList<OntClass> tSub;
for (int i = 0; i < sourceClassList.size(); i++) {
sSub = new ArrayList<OntClass>();
OntClass cl1 = (OntClass)sourceClassList.get(i).getResource().as(OntClass.class);
ExtendedIterator it1 = cl1.listSubClasses();
while(it1.hasNext()){
sSub.add((OntClass)it1.next());
}
for (int j = 0; j < targetClassList.size(); j++){
tSub = new ArrayList<OntClass>();
OntClass cl2 = (OntClass)targetClassList.get(j).getResource().as(OntClass.class);
ExtendedIterator it2 = cl2.listSubClasses();
while(it2.hasNext()){
tSub.add((OntClass)it2.next());
}
if(alignedClass(cl1.getURI(),cl2.getURI())>=CLASS_THRESHOLD &&
sSub.size()==tSub.size() && sSub.size()>0){
if(verbose){
System.out.println("size: "+sSub.size());
System.out.println("class1: "+cl1.getLocalName());
System.out.println(sSub);
System.out.println("class2: "+cl2.getLocalName());
System.out.println(tSub);
}
for (int k = 0; k < sSub.size(); k++) {
for (int t = 0; t < tSub.size(); t++) {
if(alignedClass(sSub.get(k).getURI(),tSub.get(t).getURI())>=CLASS_THRESHOLD){
sSub.remove(k);
tSub.remove(t);
k--;
t--;
break;
}
}
}
if(verbose){
System.out.println("Still to align: "+sSub.size());
}
if(sSub.size()==1){
int row = getIndex(sourceClassList,sSub.get(0).getURI());
int col = getIndex(targetClassList,tSub.get(0).getURI());
Mapping m = new Mapping( sourceClassList.get(row), targetClassList.get(col), 1.0d);
m.setProvenance(SUBCLASSES);
classesMatrix.set(row, col, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sSub.get(0)+" "+tSub.get(0)+" BY SUBCLASSES");
continue;
}
double[][] sims = new double[sSub.size()][sSub.size()];
for (int k = 0; k < sSub.size(); k++) {
for (int t = 0; t < tSub.size(); t++) {
if (verbose) System.out.println(sSub.get(k).getLocalName()+" "+tSub.get(t).getLocalName());
sims[k][t] = superclassesComparison(sSub.get(k), tSub.get(t));
}
}
if(verbose){
System.out.println("class1: "+cl1.getLocalName());
System.out.println("class2: "+cl2.getLocalName());
Utils.printMatrix(sims);
}
}
}
}
}
private void initHashMaps() {
sourceRestrictions = new HashMap<Node, List<Restriction>>();
targetRestrictions = new HashMap<Node, List<Restriction>>();
restrictions = new HashMap<Restriction, Node>();
for (int i = 0; i < sourcePropList.size(); i++) {
sourceRestrictions.put(sourcePropList.get(i),
getRestrictionsOnProperty(sourceClassList, sourcePropList.get(i)));
}
for (int i = 0; i < targetPropList.size(); i++) {
targetRestrictions.put(targetPropList.get(i),
getRestrictionsOnProperty(targetClassList, targetPropList.get(i)));
}
}
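//Matches properties by the restrictions that use them: when two properties occur in the same number of restrictions and a single best-scoring target exceeds the threshold, they are aligned.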
private void matchPropertyUsage() {
if(verbose){
System.out.println("MATCH PROPERTY USAGE");
}
Node sProp;
Node tProp;
List<Restriction> l1;
List<Restriction> l2;
ArrayList<Double> similarities = new ArrayList<Double>();
for (int i = 0; i < sourcePropList.size(); i++) {
sProp = sourcePropList.get(i);
l1 = sourceRestrictions.get(sProp);
//System.out.println("prop: "+sProp.getLocalName()+" size: "+l1.size());
similarities = new ArrayList<Double>();
for (int j = 0; j < targetPropList.size(); j++) {
tProp = targetPropList.get(j);
OntProperty sp = (OntProperty) sProp.getResource().as(OntProperty.class);
OntProperty tp = (OntProperty) tProp.getResource().as(OntProperty.class);
if((sp.isDatatypeProperty() && !tp.isDatatypeProperty())||
(!sp.isObjectProperty() && tp.isObjectProperty())){
similarities.add(0.0);
continue;
}
l2 = targetRestrictions.get(tProp);
if(l1.size()!=l2.size() || l1.size()==0){
similarities.add(0.0);
continue;
}
if(verbose){
System.out.println(sProp.getLocalName()+" "+tProp.getLocalName());
}
double[][] sims = new double[l1.size()][l1.size()];
for(int t=0; t<l1.size(); t++){
Restriction r1 = l1.get(t);
for(int k=0; k<l2.size(); k++){
Restriction r2 = l2.get(k);
sims[t][k] = restrictionUsageSimilarity(r1,r2);
}
}
//Obtain suboptimal solution
double usSim = Utils.optimalAlignment(sims);
similarities.add(usSim);
//Utils.printMatrix(sims);
if(verbose){
System.out.println("subSim: "+usSim);
}
}
//System.out.println(similarities);
int index = Utils.getOnlyMax(similarities);
if(verbose){
System.out.println("onlyMax: "+index);
}
if(index!=-1 && verbose) System.out.println(similarities.get(index));
if(index!=-1 && similarities.get(index)>parameters.getPropertyUsageThreshold()){
Mapping m = new Mapping( sProp, targetPropList.get(index), similarities.get(index));
m.setProvenance(PROPERTY_USAGE);
if(parameters.boostPropertyUsage) m.setSimilarity(1.0d);
propertiesMatrix.set(i, index, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sProp.getLocalName()+" "+targetPropList.get(index).getLocalName()+" BY PROPERTY USAGE");
}
}
}
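//Combines the similarity of two restrictions with the similarity of the classes that carry them, weighting the restriction part 2:1.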
private double restrictionUsageSimilarity(Restriction r1, Restriction r2) {
try {
double restrSim = restrictionSimilarity(r1, r2, false);
double resSim = compareResources(restrictions.get(r1).getResource(), restrictions.get(r2).getResource());
return (2*restrSim+resSim)/3;
} catch( Exception e ) {
e.printStackTrace();
return 0d;
}
}
private List<Restriction> getRestrictionsOnProperty( List<Node> classList,
Node sProp) {
ArrayList<Restriction> restr = new ArrayList<Restriction>();
for(Node cl: classList){
OntClass ontClass = (OntClass)cl.getResource().as(OntClass.class);
try {
for(Object o: ontClass.listSuperClasses().toList()){
OntClass supClass = (OntClass) o;
if(supClass.isRestriction()){
Restriction r = supClass.asRestriction();
restrictions.put(r, cl);
if(r.getOnProperty().equals((OntProperty)sProp.getResource().as(OntProperty.class)))
restr.add(r);
}
}
} catch( Exception e ) {
e.printStackTrace();
}
}
return restr;
}
/* Find UnionClass types and match member classes
* @author Ulas
*/
private void matchUnionClasses(){
if(verbose) System.out.println("MATCH UNION");
ArrayList<UnionClass> unionClassesS = new ArrayList<UnionClass>();
ArrayList<UnionClass> unionClassesT = new ArrayList<UnionClass>();
ExtendedIterator<UnionClass> its = getSourceOntology().getModel().listUnionClasses();
ExtendedIterator<UnionClass> itt = getTargetOntology().getModel().listUnionClasses();
while(its.hasNext()){
UnionClass uc = its.next();
unionClassesS.add(uc);
}
while(itt.hasNext()){
UnionClass uc = itt.next();
unionClassesT.add(uc);
}
//System.out.println();
for(int k = 0; k < unionClassesS.size(); k++){
for(int m = 0; m < unionClassesT.size(); m++){
try {
matchUnionClassMember(unionClassesS.get(k), unionClassesT.get(m));
} catch( Exception e ) {
e.printStackTrace();
}
}
}
}
/* Matches member classes of two union classes
* @author Ulas
* @param UnionClass, UnionClass
*/
private void matchUnionClassMember(UnionClass a, UnionClass b){
ArrayList<OntClass> aList = new ArrayList<OntClass>();
for (ExtendedIterator<? extends OntClass> e = a.listOperands(); e.hasNext(); ) {
Resource r0 = (Resource) e.next();
OntClass unionMember = (OntClass) r0.as( OntClass.class );
aList.add(unionMember);
//System.out.print(" " + unionMember.getLocalName());
}
ArrayList<OntClass> bList = new ArrayList<OntClass>();
for (ExtendedIterator<? extends OntClass> e = b.listOperands(); e.hasNext(); ) {
Resource r0 = (Resource) e.next();
OntClass unionMember = (OntClass) r0.as( OntClass.class );
bList.add(unionMember);
//System.out.print(" " + unionMember.getLocalName());
}
if(aList.size() != 2 || bList.size() != 2){ return; } //the code below assumes unions of exactly two members
boolean matchedS0 = false;
boolean matchedT0 = false;
boolean matchedS1 = false;
boolean matchedT1 = false;
for (int i = 0; i < sourceOntology.getClassesList().size(); i++) {
for (int j = 0; j < targetOntology.getClassesList().size(); j++) {
Mapping aln = null;
try{
aln = classesMatrix.get(i, j);
if(aln==null) continue;
Node currentNode = aln.getEntity1();
OntClass currentClassS = (OntClass) currentNode.getResource().as(OntClass.class);
if(currentClassS.equals(aList.get(0))){
matchedS0 = true;
//System.out.println("current: "+currentClassS);
Node n = classesMatrix.get(i, j).getEntity2();
double sims = classesMatrix.get(i,j).getSimilarity();
OntClass cT = (OntClass) n.getResource().as(OntClass.class);
if(verbose){
System.out.println(sourceClassList.get(i)+" "+targetClassList.get(j));
System.out.println("sims:"+sims);
System.out.println("cT:"+cT);
System.out.println("aList:"+aList);
System.out.println("bList:"+bList);
System.out.println("eq:"+cT.equals(bList.get(0))+" sims:"+sims);
}
if(cT.equals(bList.get(0)) && sims > 0.8){
//Align 1 and 1 Here
//int index1 = findSourceIndex(aList.get(0));
int i1 = getIndex(sourceClassList,aList.get(1).getURI());
int i2 = getIndex(targetClassList,bList.get(1).getURI());
if(i1==-1 || i2==-1) continue;
double sim1 = classesMatrix.getRowMaxValues(i1, 1)[0].getSimilarity();
double sim2 = classesMatrix.getColMaxValues(i2, 1)[0].getSimilarity();
if(verbose){
System.out.println(aList.get(0).getLocalName()+" "+bList.get(1).getLocalName());
System.out.println(sourceClassList.get(i)+" "+targetClassList.get(j));
System.out.println("sim1:"+sim1+" sim2:"+sim2);
}
if(sim1 < 0.6d && sim2 < 0.6d){
if(Core.DEBUG_FCM) System.out.println("ALIGNMENT:"+aList.get(1)+" "+bList.get(1)+" BY ULAS");
// classesMatrix.set(findSourceIndex(aList.get(1)), findTargetIndex(bList.get(1)),
// new Mapping(findSourceNode(aList.get(1)), findTargetNode(bList.get(1)), 1.0d));
int c1 = findSourceIndex(aList.get(1));
int c2 = findTargetIndex(bList.get(1));
if(i1==-1 || i2==-1) continue;
Mapping m = new Mapping(sourceClassList.get(c1), targetClassList.get(c2), 1.0d);
m.setProvenance(UNION_CLASSES);
classesMatrix.set(c1, c2, m);
if(verbose) System.out.println("ALIGNMENT:"+aList.get(1)+" "+bList.get(1)+" BY ULAS1");
}
}
else{
if(cT.equals(bList.get(1)) && sims > 0.8){
//Align 1 and 0 here
int i1 = getIndex(sourceClassList,aList.get(1).getURI());
int i2 = getIndex(targetClassList,bList.get(0).getURI());
if(i1==-1 || i2==-1) continue;
double sim1 = classesMatrix.getRowMaxValues(i1, 1)[0].getSimilarity();
double sim2 = classesMatrix.getColMaxValues(i2, 1)[0].getSimilarity();
if(sim1 < 0.6 && sim2 < 0.6d){
int c1 = findSourceIndex(aList.get(1));
int c2 = findTargetIndex(bList.get(0));
Mapping m = new Mapping(sourceClassList.get(c1), targetClassList.get(c2), 1.0d);
m.setProvenance(UNION_CLASSES);
classesMatrix.set(c1, c2, m);
if(verbose) System.out.println("ALIGNMENT:"+aList.get(1)+" "+bList.get(0)+" BY ULAS2");
}
}
}
}
else if(currentClassS.equals(aList.get(1))){
matchedS1 = true;
Node n = classesMatrix.get(i, j).getEntity2();
double sims = classesMatrix.get(i,j).getSimilarity();
OntClass cT = (OntClass) n.getResource().as(OntClass.class);
if(cT.equals(bList.get(0)) && sims > 0.8){
//Align 0 and 1 Here
double sim1 = classesMatrix.getRowMaxValues(findSourceIndex(aList.get(0)), 1)[0].getSimilarity();
double sim2 = classesMatrix.getColMaxValues(findTargetIndex(bList.get(1)), 1)[0].getSimilarity();
if(sim1 < 0.6d && sim2 < 0.6d){
int c1 = findSourceIndex(aList.get(0));
int c2 = findTargetIndex(bList.get(1));
Mapping m = new Mapping(sourceClassList.get(c1), targetClassList.get(c2), 1.0d);
m.setProvenance(UNION_CLASSES);
classesMatrix.set(c1, c2, m);
System.out.println("ALIGNMENT:"+aList.get(0)+" "+bList.get(1)+" BY ULAS3");
//System.out.println("A");
// classesMatrix.set(findSourceIndex(aList.get(0)), findTargetIndex(bList.get(1)),
// new Mapping(findSourceNode(aList.get(0)), findTargetNode(bList.get(1)), 1.0d));
// System.out.println();
}
}
else{
if(cT.equals(bList.get(1)) && sims > 0.8){
//Align 0 and 0 here
double sim1 = classesMatrix.getRowMaxValues(findSourceIndex(aList.get(0)), 1)[0].getSimilarity();
double sim2 = classesMatrix.getColMaxValues(findTargetIndex(bList.get(0)), 1)[0].getSimilarity();
if(sim1 < 0.6d && sim2 < 0.6d){
int c1 = findSourceIndex(aList.get(0));
int c2 = findTargetIndex(bList.get(0));
Mapping m = new Mapping(sourceClassList.get(c1), targetClassList.get(c2), 1.0d);
m.setProvenance(UNION_CLASSES);
classesMatrix.set(c1, c2, m);
System.out.println("ALIGNMENT:"+aList.get(0)+" "+bList.get(0)+" BY ULAS2");
// System.out.println("B");
// classesMatrix.set(findSourceIndex(aList.get(0)), findTargetIndex(bList.get(0)),
// new Mapping(findSourceNode(aList.get(0)), findTargetNode(bList.get(0)), 1.0d));
// System.out.println();
}
}
}
}
}
catch(Exception e){
//e.printStackTrace();
}
}
}
}
/* Finds index of a source class in the matrix
* @author Ulas
* @param OntClass
* @return int index
*/
private int findSourceIndex(OntClass c){
// Mapping aln = null;
// for(int i = 0; i < sourceOntology.getClassesList().size(); i++) {
// try{
// aln = classesMatrix.get(i, 0);
// Node currentNode = aln.getEntity1();
// OntClass currentClassS = (OntClass) currentNode.getResource().as(OntClass.class);
// if(c.equals(currentClassS)){
// return i;
// }
// }
// catch(Exception e){
// }
// }
// return -1;
return getIndex(sourceClassList, c.getURI());
}
/* Find Node type of a source class in the matrix
* @author Ulas
* @param OntClass
* @return Node represents the OntClass
*/
private Node findSourceNode(OntClass c){
Mapping aln = null;
for(int i = 0; i < sourceOntology.getClassesList().size(); i++) {
try{
aln = classesMatrix.get(i, 0);
Node currentNode = aln.getEntity1();
OntClass currentClassS = (OntClass) currentNode.getResource().as(OntClass.class);
if(c.equals(currentClassS)){
return currentNode;
}
}
catch(Exception e){
}
}
return null;
}
/* Finds index of a target class in the matrix
* @author Ulas
* @param OntClass
* @return int index
*/
private int findTargetIndex(OntClass c){
// Mapping aln = null;
// for(int i = 0; i < targetOntology.getClassesList().size(); i++) {
// try{
// aln = classesMatrix.get(0, i);
// Node currentNode = aln.getEntity2();
// OntClass currentClassS = (OntClass) currentNode.getResource().as(OntClass.class);
// if(c.equals(currentClassS)){
// return i;
// }
// }
// catch(Exception e){
// }
// }
// return -1;
return getIndex(targetClassList, c.getURI());
}
/* Find Node type of a target class in the matrix
* @author Ulas
* @param OntClass
* @return Node represents the OntClass
*/
private Node findTargetNode(OntClass c){
Mapping aln = null;
for(int i = 0; i < targetOntology.getClassesList().size(); i++) {
try{
aln = classesMatrix.get(0, i);
Node currentNode = aln.getEntity2();
OntClass currentClassT = (OntClass) currentNode.getResource().as(OntClass.class);
if(c.equals(currentClassT)){
return currentNode;
}
}
catch(Exception e){
}
}
return null;
}
private void matchSuperclasses() {
ArrayList<Double> similarities = new ArrayList<Double>();
//Match classes based on Superclasses and types
for (int i = 0; i<sourceOntology.getClassesList().size(); i++) {
Node source = sourceOntology.getClassesList().get(i);
//You can print something once per class
similarities = new ArrayList<Double>();
double sim;
for (int j = 0; j<targetOntology.getClassesList().size(); j++) {
Node target = targetOntology.getClassesList().get(j);
sim = superclassesComparison(source,target);
classSimilarities[i][j].setSuperclasses(sim);
similarities.add(sim);
}
if(verbose) System.out.println(similarities);
int index = Utils.getOnlyMax(similarities);
if(verbose){
System.out.println("onlyMax: "+index);
if(index!=-1) System.out.println(similarities.get(index));
}
if(index!=-1 && similarities.get(index)>=parameters.getSuperclassThreshold()){
Mapping m = new Mapping(source, targetClassList.get(index), similarities.get(index));
m.setProvenance(SUBCLASSOF);
if(parameters.boostSubclassOf) m.setSimilarity(1.0d);
classesMatrix.set(i, index, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+source.getLocalName()+" "
+targetClassList.get(index).getLocalName()+" BY SUBCLASSOF");
}
verbose = false;
}
}
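//Compares the superclass lists of two classes; returns 0 unless both lists have the same non-zero size, otherwise the score of the best one-to-one pairing of superclasses.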
private double superclassesComparison(OntClass sClass, OntClass tClass) {
double subSim = 0.0d;
try {
List<OntClass> l1 = sClass.listSuperClasses().toList();
List<OntClass> l2 = tClass.listSuperClasses().toList();
if(l1.size()!=l2.size() || l1.size()==0) return 0.0d;
double[][] sims = new double[l1.size()][l1.size()];
for(int i=0; i<l1.size(); i++){
OntClass c1 = (OntClass) l1.get(i);
for(int j=0; j<l2.size(); j++){
OntClass c2 = (OntClass) l2.get(j);
sims[i][j] = superClassSimilarity(c1,c2);
}
}
//Obtain best matching solution
subSim = Utils.optimalAlignment(sims);
if(verbose){
Utils.printMatrix(sims);
System.out.println("subSim: "+subSim);
}
} catch( Exception e ) {
e.printStackTrace();
return 0.0d;
}
return subSim;
}
private double superclassesComparison(Node source, Node target) {
if(verbose)
System.out.println("SuperClassesComp: "+source.getLocalName()+","+target.getLocalName());
OntClass sClass = null;
OntClass tClass = null;
if(!source.getResource().canAs(OntClass.class) || !target.getResource().canAs(OntClass.class))
return 0.0;
sClass = source.getResource().as(OntClass.class);
tClass = target.getResource().as(OntClass.class);
return superclassesComparison(sClass, tClass);
}
private double superClassSimilarity(OntClass c1, OntClass c2) {
//System.out.println(c1+" "+c2);
if(c1.isRestriction() && c2.isRestriction()){
//System.out.println("RESTR");
try {
return restrictionSimilarity((Restriction)c1.as(Restriction.class),
(Restriction)c2.as(Restriction.class),true);
} catch( Exception e ) {
e.printStackTrace();
return 0d;
}
}
if(c1.getURI()!=null && c2.getURI()!=null){
//System.out.println("ALIGN");
return alignedClass(c1.getURI(),c2.getURI());
}
return 0.0;
}
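//Scores two restrictions by their cardinality or allValuesFrom constraints; when comparing on behalf of classes, the alignment of the restricted properties contributes one quarter of the score.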
private double restrictionSimilarity(Restriction r1, Restriction r2,boolean classes) throws Exception {
double sim = 0;
double onProp = 0;
try {
if(classes)
onProp = alignedProp(r1.getOnProperty().getURI(), r2.getOnProperty().getURI());
} catch( Exception e ) {
e.printStackTrace();
}
if(r1.isMaxCardinalityRestriction() && r2.isMaxCardinalityRestriction()){
MaxCardinalityRestriction m1 = r1.asMaxCardinalityRestriction();
MaxCardinalityRestriction m2 = r2.asMaxCardinalityRestriction();
if(m1.getMaxCardinality()==m2.getMaxCardinality())
sim++;
}
else if(r1.isMinCardinalityRestriction() && r2.isMinCardinalityRestriction()){
MinCardinalityRestriction m1 = r1.asMinCardinalityRestriction();
MinCardinalityRestriction m2 = r2.asMinCardinalityRestriction();
if(m1.getMinCardinality()==m2.getMinCardinality())
sim++;
}
else if(r1.isCardinalityRestriction() && r2.isCardinalityRestriction()){
try {
CardinalityRestriction c1 = r1.asCardinalityRestriction();
CardinalityRestriction c2 = r2.asCardinalityRestriction();
if(c1.getCardinality()==c2.getCardinality())
sim++;
} catch( Exception e ) {
e.printStackTrace();
}
}
else if(r1.isAllValuesFromRestriction() && r2.isAllValuesFromRestriction()){
AllValuesFromRestriction a1 = r1.asAllValuesFromRestriction();
AllValuesFromRestriction a2 = r2.asAllValuesFromRestriction();
double resSim = compareResources(a1.getAllValuesFrom(),a2.getAllValuesFrom());
if(resSim==1 && Utils.primitiveType(a1.getAllValuesFrom().getURI()))
resSim = 0.75;
sim += resSim;
}
if(classes)
return (sim*3+onProp)/4;
else return sim;
}
private void matchRangeAndDomain() {
for (int i = 0; i < sourcePropList.size(); i++) {
for (int j = 0; j < targetPropList.size(); j++){
double sim = rangeAndDomainSimilarity(sourcePropList.get(i),targetPropList.get(j));
propSimilarities[i][j].setRangeAndDomain(sim);
if(sim>=parameters.getRangeDomainThreshold()){
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sourcePropList.get(i).getLocalName()+" "
+targetPropList.get(j).getLocalName()+" BY RANGE/DOMAIN");
Mapping m = new Mapping(sourcePropList.get(i),targetPropList.get(j), sim);
m.setProvenance(RANGE_DOMAIN);
if(parameters.boostRangeDomain) m.setSimilarity(1.0d);
propertiesMatrix.set(i,j, m);
}
}
}
return;
}
private double rangeAndDomainSimilarity(Node source, Node target) {
if(!source.getResource().canAs(OntProperty.class) ||
!target.getResource().canAs(OntProperty.class))
return 0.0;
OntProperty sProp = (OntProperty) source.getResource().as(OntProperty.class);
OntProperty tProp = (OntProperty) target.getResource().as(OntProperty.class);
return rangeAndDomainSimilarity(sProp, tProp);
}
private double domainSimilarity(OntResource sDom, OntResource tDom) {
if(sDom.canAs(OntClass.class) && tDom.canAs(OntClass.class)){
OntClass c1 = sDom.asClass();
OntClass c2 = tDom.asClass();
if(!c1.isUnionClass() || !c2.isUnionClass()){
if(sDom.getURI()!=null && tDom.getURI()!=null){
if(sDom.getURI().equals(tDom.getURI()))
return 1;
else return alignedClass(sDom.getURI(), tDom.getURI());
}
}
else{
//BOTH UNION CLASSES
if(verbose){
System.out.println("Both union!!");
System.out.println(c1.getLocalName()+", "+c2.getLocalName());
}
UnionClass u1 = c1.asUnionClass();
UnionClass u2 = c2.asUnionClass();
List<? extends OntClass> l1 = u1.listOperands().toList();
List<? extends OntClass> l2 = u2.listOperands().toList();
if(l1.size() != l2.size() || l1.size()==0)
return 0.0;
double[][] sims = new double[l1.size()][l1.size()];
for(int i=0; i<l1.size(); i++){
OntResource r1 = (OntResource)l1.get(i);
for(int j=0; j<l2.size(); j++){
OntResource r2 = (OntResource) l2.get(j);
sims[i][j] = compareResources(r1, r2);
}
}
if(verbose){
System.out.println("UNION COMP:");
Utils.printMatrix(sims);
}
//Obtain suboptimal solution
double unionSim = Utils.optimalAlignment(sims);
if(verbose){
System.out.println("unionSim: "+unionSim);
}
if(unionSim>0) unionSim += 0.3;
return unionSim;
}
}
return 0;
}
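//Dice-style overlap: twice the number of same-named (non-anonymous) individual pairs divided by the total number of individuals in both lists.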
public static double individualsComparison(List<Individual> sList, List<Individual> tList){
//Look at individuals
if(sList.size()==0 || tList.size()==0) return 0;
Individual sInd;
Individual tInd;
int count = 0;
for (int i = 0; i < sList.size(); i++) {
for (int j = 0; j < tList.size(); j++) {
sInd = sList.get(i);
tInd = tList.get(j);
if(!sInd.isAnon() && !tInd.isAnon()){
if(sInd.getLocalName().equals(tInd.getLocalName())){
count++;
}
}
}
}
return 2.0 * count / (sList.size() + tList.size());
}
public double alignedClass(String sURI,String tURI){
int s = -1;
int t = -1;
for (int i = 0; i < sourceClassList.size(); i++) {
if(sourceClassList.get(i).getUri().equals(sURI))
s = i;
}
if(s==-1) return 0.0;
for (int i = 0; i < targetClassList.size(); i++) {
if(targetClassList.get(i).getUri().equals(tURI))
t = i;
}
if(t==-1) return 0.0;
return classesMatrix.getSimilarity(s, t);
}
public double alignedProp(String sURI,String tURI){
int s = -1;
int t = -1;
for (int i = 0; i < sourcePropList.size(); i++) {
if(sourcePropList.get(i).getUri().equals(sURI))
s = i;
}
if(s==-1) return 0.0;
for (int i = 0; i < targetPropList.size(); i++) {
if(targetPropList.get(i).getUri().equals(tURI))
t = i;
}
if(t==-1) return 0.0;
return propertiesMatrix.getSimilarity(s, t);
}
private int getIndex( List<Node> list, String uri) {
for (int i = 0; i < list.size(); i++) {
if(list.get(i).getUri().equals(uri))
return i;
}
return -1;
}
private double compareResources(Resource r1, Resource r2){
String uri1 = r1.getURI();
String uri2 = r2.getURI();
if(uri1==null || uri2==null) return 0.0;
if(uri1.equals(uri2))
return 1.0;
double simClass = alignedClass(uri1,uri2);
double simProp = alignedProp(uri1,uri2);
if(simClass > simProp)
return simClass;
else return simProp;
}
private double getNumberOfClassAlignments() {
double[][] matrix = classesMatrix.getCopiedSimilarityMatrix();
double sum = 0;
for (int i = 0; i < matrix.length; i++) {
for (int j = 0; j < matrix[0].length; j++) {
sum += matrix[i][j];
}
}
return sum;
}
private double getNumberOfPropAlignments() {
double[][] matrix = propertiesMatrix.getCopiedSimilarityMatrix();
double sum = 0;
for (int i = 0; i < matrix.length; i++) {
for (int j = 0; j < matrix[0].length; j++) {
sum += matrix[i][j];
}
}
return sum;
}
private void printPropValues() {
Iterator<OntProperty> it = sourcePropValues.keySet().iterator();
while(it.hasNext()){
OntProperty prop = it.next();
//System.out.println(prop);
//System.out.println(sourcePropValues.get(prop));
}
//System.out.println("TARGET");
//System.out.println(targetPropValues);
Iterator<OntProperty> it2 = targetPropValues.keySet().iterator();
while(it2.hasNext()){
OntProperty prop = it2.next();
//System.out.println(prop);
//System.out.println(targetPropValues.get(prop));
}
}
/**
* Input must be a Node representing a class. (i.e. Node.isClass() == true)
* @param currentNode Node object representing a class.
* @return List of OntResource object representing the individuals.
*/
public ArrayList<Individual> getIndividuals( Node currentNode ) {
ArrayList<Individual> individualsList = new ArrayList<Individual>();
OntClass currentClass = (OntClass) currentNode.getResource().as(OntClass.class);
ExtendedIterator indiIter = currentClass.listInstances(true);
while( indiIter.hasNext() ) {
Individual ci = (Individual) indiIter.next();
//if( ci.isAnon() ) System.out.println("\n************************\nProperties of individual:" + ci.getId() );
//else System.out.println("\n************************\nProperties of individual:" + ci.getLocalName() );
StmtIterator indiPropertiesIter = ci.listProperties();
while( indiPropertiesIter.hasNext() ) {
Statement currentProperty = indiPropertiesIter.nextStatement();
//System.out.println(currentProperty);
}
individualsList.add( ci );
}
// try to deal with improperly declared individuals. (from the 202 scrambled ontology)
if( individualsList.isEmpty() ) {
OntModel mod = (OntModel) currentClass.getModel();
List<Statement> ls = mod.listStatements(null , mod.getProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), mod.getResource(currentClass.getLocalName())).toList();
Iterator<Statement> lsiter = ls.iterator();
int k = 1;
while( lsiter.hasNext() ) {
Statement s = lsiter.next();
Resource r = s.getSubject();
if( r.canAs(Individual.class) ) {
Individual indi = r.as(Individual.class);
Individual ci = indi;
//if( ci.isAnon() ) System.out.println("\n************************\nProperties of individual:" + ci.getId() );
//else System.out.println("\n************************\nProperties of individual:" + ci.getLocalName() );
StmtIterator indiPropertiesIter = ci.listProperties();
while( indiPropertiesIter.hasNext() ) {
Statement currentProperty = indiPropertiesIter.nextStatement();
RDFNode currentnode = currentProperty.getObject();
if( currentnode.canAs(Literal.class) ) {
Literal currentLiteral = (Literal) currentnode.as(Literal.class);
currentLiteral.getString();
}
//System.out.println(currentProperty);
}
individualsList.add(indi);
}
}
}
return individualsList;
}
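//Two classes are matched when they share an individual with the same local name, or when their individuals' property values match recursively.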
private boolean matchIndividuals(Node source, Node target) {
boolean classMatched = false;
boolean individualMatched = false;
ArrayList<Individual> sourceIndi = getIndividuals(source);
ArrayList<Individual> targetIndi = getIndividuals(target);
for (Individual iSource: sourceIndi){
for (Individual iTarget: targetIndi) {
if (!iSource.isAnon() && !iTarget.isAnon()) { //if neither is anonymous
if (iSource.getLocalName().equals(iTarget.getLocalName())) {
classMatched = true;
individualMatched = recursiveMatchIndividuals(iSource, iTarget);
}
}
else if (iSource.isAnon() && iTarget.isAnon()) { //both anonymous
individualMatched = recursiveMatchIndividuals(iSource, iTarget); //prop?
}
if (individualMatched) classMatched = true;
}
}
return classMatched;
}
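//Compares the property statements of two individuals: anonymous objects are compared recursively, while matching object values also align the corresponding properties in the property matrix.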
private boolean recursiveMatchIndividuals(Individual iSource, Individual iTarget) {
boolean IndividualsMatched = false;
boolean propertyMatched = false;
List<Statement> sourceProperties = iSource.listProperties().toList();
List<Statement> targetProperties = iTarget.listProperties().toList();
// for (Statement s: sourceProperties) System.out.println(s);
for(int i=0;i<sourceProperties.size();i++){
for(int j=0;j<targetProperties.size();j++){
Statement sourceProperty = sourceProperties.get(i);
Statement targetProperty = targetProperties.get(j);
propertyMatched = false;
if (sourceProperty.getObject().isAnon() && targetProperty.getObject().isAnon()) {
// RDFNode subject = sourceProperty.getSubject();
// RDFNode object = sourceProperty.getObject();
// RDFNode prop = sourceProperty.getPredicate();
propertyMatched = recursiveMatchIndividuals((Individual)(sourceProperty.getObject().as(Individual.class)),
(Individual)(targetProperty.getObject().as(Individual.class)));
}
else {
Triple sourcePropTriple = (sourceProperty).asTriple();
Triple targetPropTriple = (targetProperty).asTriple();
//System.out.println("s:"+sourcePropTriple);
//System.out.println("t:"+targetPropTriple);
if(sourcePropTriple.getObject().equals(targetPropTriple.getObject())){
//System.out.println("EQUALS");
//System.out.println(sourcePropTriple.getPredicate().getURI());
String uri1 = sourcePropTriple.getPredicate().getURI();
String uri2 = targetPropTriple.getPredicate().getURI();
if(uri2.length()<20)
uri2 = targetOntology.getURI() + uri2;
//System.out.println(uri2);
Node source = get(sourcePropList, uri1);
//System.out.println(source);
Node target = get(targetPropList, uri2);
//System.out.println(target);
if (source != null && target != null) {
Mapping m = new Mapping(source, target, 1.0d);
m.setProvenance(RECURSIVE_INDIVIDUALS);
propertiesMatrix.set(sourcePropList.indexOf(source), targetPropList.indexOf(target), m);
//propertiesMatrix.setSimilarity(sourcePropList.indexOf(source), targetPropList.indexOf(target), 1.0);
}
propertyMatched = true;
}
}
if (propertyMatched) IndividualsMatched = true;
}
}
return IndividualsMatched;
}
private Node get( List<Node> nodeList, String uri) {
int ind = getIndex(nodeList, uri);
if(ind!=-1)
return nodeList.get(ind);
return null;
}
public ClassSimilarity getClassSimilarity(Mapping mapping){
if(mapping.getAlignmentType().equals(alignType.aligningProperties)) return null;
return classSimilarities[mapping.getSourceKey()][mapping.getTargetKey()];
}
public PropertySimilarity getPropertySimilarity(Mapping mapping){
if(mapping.getAlignmentType().equals(alignType.aligningClasses)) return null;
return propSimilarities[mapping.getSourceKey()][mapping.getTargetKey()];
}
public void setUseIndividuals(boolean useIndividuals){
individuals = useIndividuals;
}
@Override
public AbstractMatcherParametersPanel getParametersPanel(){
if(parametersPanel == null){
parametersPanel = new IterativeInstanceStructuralParametersPanel();
}
return parametersPanel;
}
public String allSimilarities(Alignment<Mapping> mappings){
String ret = "Source\tTarget\tSimilarity\tSyntactic\tRestrictions\tSuperclasses\tSubclasses\tProvenance\n";
for(Mapping mapping: mappings){
if(mapping.getAlignmentType() == alignType.aligningClasses)
ret += classMappingTuple(mapping) + "\n";
}
ret += "Source\tTarget\tSimilarity\tSyntactic\tRangeDomain\tValues\tSubproperties\tProvenance\n";
for(Mapping mapping: mappings){
if(mapping.getAlignmentType() == alignType.aligningProperties)
ret += propertyMappingTuple(mapping) + "\n";
}
return ret;
}
public String allSimilarities(alignType type, boolean onlyMappings){
if(classesAlignmentSet==null || propertiesAlignmentSet==null)
return null;
if(type == alignType.aligningClasses){
String ret = "Source\tTarget\tSimilarity\tSyntactic\tRestrictions\tSuperclasses\tSubclasses\tProvenance\n";
for(Mapping mapping: classesAlignmentSet){
ret += classMappingTuple(mapping) + "\n";
}
return ret;
}
else if(type == alignType.aligningProperties){
String ret = "Source\tTarget\tSimilarity\tSyntactic\tRangeDomain\tValues\tSubproperties\tProvenance\n";
for(Mapping mapping: propertiesAlignmentSet){
ret += propertyMappingTuple(mapping) + "\n";
}
return ret;
}
return null;
}
public String classMappingTuple(Mapping mapping){
String ret = mapping.getEntity1().getLocalName() + "\t" + mapping.getEntity2().getLocalName() +
"\t" + mapping.getSimilarity();
ClassSimilarity sim = getClassSimilarity(mapping);
ret += "\t" + sim.getSyntactic() + "\t" + sim.getRestrictions() + "\t" + sim.getSuperclasses()
+ "\t" + sim.getSubclasses();
if(mapping.getProvenance()!=null) ret += "\t" + mapping.getProvenance();
else ret += "\t" + SYNTACTIC;
return ret;
}
public String propertyMappingTuple(Mapping mapping){
String ret = mapping.getEntity1().getLocalName() + "\t" + mapping.getEntity2().getLocalName() +
"\t" + mapping.getSimilarity();
PropertySimilarity sim = getPropertySimilarity(mapping);
ret += "\t" + sim.getSyntactic() + "\t" + sim.getRangeAndDomain() + "\t" + sim.getValues() +
"\t" + sim.getSubProperties();
if(mapping.getProvenance()!=null) ret += "\t" + mapping.getProvenance();
else ret += "\t" + SYNTACTIC;
return ret;
}
private void printAllSimilarities() {
for (int i = 0; i < classSimilarities.length; i++) {
for (int j = 0; j < classSimilarities[0].length; j++) {
System.out.println(sourceClassList.get(i)+" "+targetClassList.get(j)
+classesMatrix.getSimilarity(i, j));
System.out.println(classSimilarities[i][j]);
}
}
for (int i = 0; i < propSimilarities.length; i++) {
for (int j = 0; j < propSimilarities[0].length; j++) {
System.out.println(sourcePropList.get(i)+" "+targetPropList.get(j)
+ " " + propertiesMatrix.getSimilarity(i, j));
System.out.println(propSimilarities[i][j]);
}
}
}
} | AgreementMaker-Matchers/IterativeInstanceStructuralMatcher/src/iterativeinstancestructuralmatcher/internal/IterativeInstanceStructuralMatcher.java | package iterativeinstancestructuralmatcher.internal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import am.AMException;
import am.Utility;
import am.app.Core;
import am.app.mappingEngine.AbstractMatcher;
import am.app.mappingEngine.AbstractMatcherParametersPanel;
import am.app.mappingEngine.Alignment;
import am.app.mappingEngine.Mapping;
import am.app.mappingEngine.MatcherFactory;
import am.app.mappingEngine.MatchersRegistry;
import am.app.mappingEngine.referenceAlignment.ReferenceAlignmentMatcher;
import am.app.mappingEngine.referenceAlignment.ReferenceEvaluationData;
import am.app.mappingEngine.referenceAlignment.ReferenceEvaluator;
import am.app.mappingEngine.similarityMatrix.ArraySimilarityMatrix;
import am.app.ontology.Node;
import am.app.ontology.Ontology;
import am.userInterface.MatcherParametersDialog;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.ontology.AllValuesFromRestriction;
import com.hp.hpl.jena.ontology.CardinalityRestriction;
import com.hp.hpl.jena.ontology.Individual;
import com.hp.hpl.jena.ontology.MaxCardinalityRestriction;
import com.hp.hpl.jena.ontology.MinCardinalityRestriction;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntProperty;
import com.hp.hpl.jena.ontology.OntResource;
import com.hp.hpl.jena.ontology.Restriction;
import com.hp.hpl.jena.ontology.UnionClass;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
public class IterativeInstanceStructuralMatcher extends AbstractMatcher {
private static final long serialVersionUID = 3612931342445940115L;
double PROPERTY_THRESHOLD = 0.8;
double CLASS_THRESHOLD = 0.8;
boolean individuals = true;
boolean matchUnionClasses = true;
boolean printMappingTable = false;
static boolean verbose = false;
private List<Node> sourceClassList;
private List<Node> targetClassList;
private List<Node> sourcePropList;
private List<Node> targetPropList;
private transient HashMap<Node, List<Restriction>> sourceRestrictions;
private transient HashMap<Node, List<Restriction>> targetRestrictions;
private transient HashMap<Restriction, Node> restrictions;
private transient HashMap<OntProperty, List<String>> sourcePropValues;
private transient HashMap<OntProperty, List<String>> targetPropValues;
private PropertySimilarity[][] propSimilarities;
private ClassSimilarity[][] classSimilarities;
//Provenance Strings
private final String RECURSIVE_INDIVIDUALS = "Recursive Individuals";
private final String SUBPROPERTIES = "Subproperties";
private final String SUBCLASSES = "Subclasses";
private final String COMBINATION = "Combination";
private final String PROPERTY_VALUES = "Property Values";
private final String PROPERTY_USAGE = "Property Usage";
private final String UNION_CLASSES = "Union Classes";
private final String SUBCLASSOF = "SubclassOf";
private final String RANGE_DOMAIN = "Range Domain";
private final String SYNTACTIC = "Syntactic";
IterativeInstanceStructuralParameters parameters;
public IterativeInstanceStructuralMatcher(){
super();
minInputMatchers = 0;
maxInputMatchers = 1;
needsParam = true;
setName("Iterative Instance and Structural Matcher");
//progressDisplay = new MatchingProgressDisplay();
}
@Override
public void matchEnd() {
// TODO Auto-generated method stub
super.matchEnd();
if(printMappingTable)
evaluate();
}
// @SuppressWarnings("unchecked")
@Override
protected void align() throws Exception {
if (sourceOntology == null || targetOntology == null)
return; // cannot align just one ontology
if(param!=null)
parameters = (IterativeInstanceStructuralParameters)param;
sourceClassList = sourceOntology.getClassesList();
targetClassList = targetOntology.getClassesList();
sourcePropList = sourceOntology.getPropertiesList();
targetPropList = targetOntology.getPropertiesList();
//Initialize maps for information about restrictions
initHashMaps();
receiveInputMatrices();
initSimilarityMatrices();
if(individuals){
sourcePropValues = initPropValues(sourcePropList,sourceOntology);
targetPropValues = initPropValues(targetPropList,targetOntology);
}
printPropValues();
if(individuals){
//Match properties by similar values
if(parameters.usePropertyUsage)
matchPropertyValues();
Node source;
Node target;
for (int i = 0; i < sourceClassList.size(); i++) {
source = sourceClassList.get(i);
for (int j = 0; j < targetClassList.size(); j++) {
target = targetClassList.get(j);
if(matchIndividuals(source,target)){
Mapping m = new Mapping(source, target, 1.0);
m.setProvenance(RECURSIVE_INDIVIDUALS);
classesMatrix.set(i, j, m);
}
}
}
}
//Iterative part
for (int i = 0; ; i++) {
double totAlign = getNumberOfClassAlignments() + getNumberOfPropAlignments();
//Match by superclasses and restriction on properties
if(parameters.useSuperclasses)
matchSuperclasses();
//Match properties by range and domain
if(parameters.useRangeDomain)
matchRangeAndDomain();
//Match properties by their presence in restrictions
if(parameters.useRangeDomain)
matchPropertyUsage();
//match sons of aligned classes
matchSubClasses();
//match sons of aligned properties
matchSubProperties();
findNewAlignments();
double totAlign2 = getNumberOfClassAlignments() + getNumberOfPropAlignments();
if(totAlign2==totAlign){
if( Core.DEBUG_FCM ) System.out.println("CONVERGED IN "+(i+1)+" ITERATIONS");
break;
}
}
if( matchUnionClasses ) matchUnionClasses();
filterNonOntologyAlignments();
//printAllSimilarities();
//evaluate();
}
private void findNewAlignments() {
double sim;
for (int i = 0; i < classSimilarities.length; i++) {
for (int j = 0; j < classSimilarities[0].length; j++) {
sim = classSimilarities[i][j].getSimilarity();
if(sim > classesMatrix.getSimilarity(i, j)){
Mapping m = new Mapping( sourceClassList.get(i), targetClassList.get(j), sim );
m.setProvenance(COMBINATION);
classesMatrix.set(i, j, m);
}
}
}
for (int i = 0; i < propSimilarities.length; i++) {
for (int j = 0; j < propSimilarities[0].length; j++) {
//System.out.print(sourcePropList.get(i)+" "+targetPropList.get(j));
sim = propSimilarities[i][j].getSimilarity();
if(sim > propertiesMatrix.getSimilarity(i, j)){
Mapping m = new Mapping( sourcePropList.get(i), targetPropList.get(j), sim );
m.setProvenance(COMBINATION);
propertiesMatrix.set(i, j, m);
}
}
}
}
private void initSimilarityMatrices() {
classSimilarities = new ClassSimilarity[sourceClassList.size()][targetClassList.size()];
propSimilarities = new PropertySimilarity[sourcePropList.size()][targetPropList.size()];
for (int i = 0; i < sourceClassList.size(); i++) {
for (int j = 0; j < targetClassList.size(); j++) {
classSimilarities[i][j] = new ClassSimilarity();
classSimilarities[i][j].setSyntactic(classesMatrix.getSimilarity(i, j));
}
}
for (int i = 0; i < sourcePropList.size(); i++) {
for (int j = 0; j < targetPropList.size(); j++) {
propSimilarities[i][j] = new PropertySimilarity();
propSimilarities[i][j].setSyntactic(propertiesMatrix.getSimilarity(i, j));
}
}
}
private void receiveInputMatrices() {
if(inputMatchers.size()>0){
AbstractMatcher input = inputMatchers.get(0);
//classesMatrix = input.getClassesMatrix();
try {
classesMatrix = new ArraySimilarityMatrix(input.getClassesMatrix());
} catch( AMException e ) {
e.printStackTrace();
}
//propertiesMatrix = input.getPropertiesMatrix();
try {
propertiesMatrix = new ArraySimilarityMatrix(input.getPropertiesMatrix());
} catch( AMException e ) {
e.printStackTrace();
}
//System.out.println();
}
else{
classesMatrix = new ArraySimilarityMatrix(sourceOntology, targetOntology, alignType.aligningClasses);
propertiesMatrix = new ArraySimilarityMatrix(sourceOntology, targetOntology, alignType.aligningProperties);
}
}
private void filterNonOntologyAlignments() {
for (int i = 0; i < sourceClassList.size(); i++) {
if(!sourceClassList.get(i).getUri().startsWith(sourceOntology.getURI())){
for (int j = 0; j < targetClassList.size(); j++) {
classesMatrix.set(i, j, null);
}
}
}
for (int j = 0; j < targetClassList.size(); j++) {
if(!targetClassList.get(j).getUri().startsWith(targetOntology.getURI())){
for (int i = 0; i < sourceClassList.size(); i++) {
classesMatrix.set(i, j, null);
}
}
}
for (int i = 0; i < sourcePropList.size(); i++) {
if(!sourcePropList.get(i).getUri().startsWith(sourceOntology.getURI())){
for (int j = 0; j < targetPropList.size(); j++) {
propertiesMatrix.set(i, j, null);
}
}
}
for (int j = 0; j < targetPropList.size(); j++) {
if(!targetPropList.get(j).getUri().startsWith(targetOntology.getURI())){
for (int i = 0; i < sourcePropList.size(); i++) {
propertiesMatrix.set(i, j, null);
}
}
}
}
private void evaluate() {
ReferenceAlignmentMatcher refMatcher = (ReferenceAlignmentMatcher)MatcherFactory.getMatcherInstance(MatchersRegistry.ImportAlignment,0);
MatcherParametersDialog dialog = new MatcherParametersDialog(refMatcher,false,false);
if(dialog.parametersSet()) {
refMatcher.setParam(dialog.getParameters());
refMatcher.setThreshold(refMatcher.getDefaultThreshold());
refMatcher.setMaxSourceAlign(refMatcher.getDefaultMaxSourceRelations());
refMatcher.setMaxTargetAlign(refMatcher.getDefaultMaxTargetRelations());
try {
refMatcher.match();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
String report="Reference Evaluation Complete\n\n";
ReferenceEvaluationData rd = ReferenceEvaluator.compare(getAlignment(), refMatcher.getAlignment());
setRefEvaluation(rd);
System.out.println("CORRECT MAPPINGS");
System.out.println(allSimilarities(rd.getCorrectAlignments()));
System.out.println("WRONG MAPPINGS");
System.out.println(allSimilarities(rd.getErrorAlignments()));
System.out.println("MISSED MAPPINGS");
System.out.println(allSimilarities(rd.getLostAlignments()));
report+= getRegistryEntry().getMatcherName()+"\n\n";
report +=rd.getReport()+"\n";
Utility.displayTextAreaPane(report,"Reference Evaluation Report");
}
dialog.dispose();
Core.getUI().redisplayCanvas();
}
private void matchPropertyValues() {
if( Core.DEBUG_FCM ) System.out.println("MATCH PROPERTY VALUES");
OntProperty sProp;
OntProperty tProp;
for (int i = 0; i < sourcePropList.size() ; i++) {
sProp = (OntProperty)sourcePropList.get(i).getResource().as(OntProperty.class);
List<String> sList = sourcePropValues.get(sProp);
for (int j = 0; j < targetPropList.size(); j++) {
tProp = (OntProperty)targetPropList.get(j).getResource().as(OntProperty.class);
if(!sProp.getURI().startsWith(sourceOntology.getURI())||
!tProp.getURI().startsWith(targetOntology.getURI()))
continue;
List<String> tList = targetPropValues.get(tProp);
if(sList.size()==0 || tList.size()==0) continue;
if( Core.DEBUG_FCM ) System.out.println(sProp.getLocalName()+" "+tProp.getLocalName()+" litsize: "+sList.size()+" "+tList.size());
double sim = 0;
String l1;
String l2;
for (int k = 0; k < sList.size(); k++) {
l1 = sList.get(k);
for (int t = 0; t < tList.size(); t++) {
l2 = tList.get(t);
if(l1.equals(l2)){
sim++;
}
}
}
sim = sim / Math.max(sList.size(),tList.size());
propSimilarities[i][j].setValues(sim);
if(sim >= parameters.getPropertyValuesThreshold()){
Mapping m = new Mapping(sourcePropList.get(i), targetPropList.get(j), sim);
m.setProvenance(PROPERTY_VALUES);
if(parameters.boostPropertyValues) m.setSimilarity(1.0d);
propertiesMatrix.set(i, j, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sProp.getLocalName()+" "+tProp.getLocalName()+" BY PROP VALUES");
}
}
}
}
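	// --- Editorial example (not part of the original matcher) ---
	// A minimal sketch of the literal-overlap score computed by matchPropertyValues
	// above: the number of shared literal values divided by the size of the larger
	// value set. The concrete numbers below are invented for illustration.
	private static double propertyValueOverlapSketch() {
		int sharedLiterals = 3;   // literals appearing under both properties
		int sourceLiterals = 4;   // distinct literals of the source property
		int targetLiterals = 5;   // distinct literals of the target property
		return (double) sharedLiterals / Math.max(sourceLiterals, targetLiterals); // 0.6
	}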
@SuppressWarnings("unchecked")
private HashMap<OntProperty, List<String>> initPropValues(List<Node> propList,Ontology ontology) {
HashMap<OntProperty, List<String>> propValues = new HashMap<OntProperty, List<String>>();
List<Statement> stmts;
List<String> literals;
for (int i = 0; i < propList.size(); i++) {
OntProperty sProp = (OntProperty)propList.get(i).getResource().as(OntProperty.class);
//System.out.println("Prop: "+sProp);
literals = new ArrayList<String>();
stmts = ontology.getModel().listStatements(null, sProp, (RDFNode)null).toList();
if( stmts.isEmpty() ) { stmts = ontology.getModel().listStatements(null, ontology.getModel().getProperty(sProp.getLocalName()) ,(RDFNode)null).toList(); }
for (int j = 0; j < stmts.size(); j++) {
Statement s = stmts.get(j);
//System.out.println(s);
RDFNode obj = s.getObject();
if(obj.isLiteral()){
Literal l = (Literal)obj;
if(!literals.contains(l.getString()))
literals.add(l.getString());
}
}
propValues.put(sProp, literals);
}
return propValues;
}
private void matchSubProperties() {
if(verbose)
if( Core.DEBUG_FCM ) System.out.println("MATCH SUBPROPERTIES");
ArrayList<OntProperty> sSub;
ArrayList<OntProperty> tSub;
for (int i = 0; i < sourcePropList.size(); i++) {
sSub = new ArrayList<OntProperty>();
OntProperty pr1 = (OntProperty)sourcePropList.get(i).getResource().as(OntProperty.class);
ExtendedIterator it1 = pr1.listSubProperties();
while(it1.hasNext()){
sSub.add((OntProperty)it1.next());
}
for (int j = 0; j < targetPropList.size(); j++){
tSub = new ArrayList<OntProperty>();
OntProperty pr2 = (OntProperty)targetPropList.get(j).getResource().as(OntProperty.class);
ExtendedIterator it2 = pr2.listSubProperties();
while(it2.hasNext()){
tSub.add((OntProperty)it2.next());
}
if(alignedProp(pr1.getURI(),pr2.getURI())>=PROPERTY_THRESHOLD &&
sSub.size()==tSub.size() && sSub.size()>0){
if(verbose){
if( Core.DEBUG_FCM ) System.out.println("size: "+sSub.size());
if( Core.DEBUG_FCM ) System.out.println("prop1: "+pr1.getLocalName());
if( Core.DEBUG_FCM ) System.out.println(sSub);
if( Core.DEBUG_FCM ) System.out.println("prop2: "+pr2.getLocalName());
if( Core.DEBUG_FCM ) System.out.println(tSub);
}
for (int k = 0; k < sSub.size(); k++) {
for (int t = 0; t < tSub.size(); t++) {
if(alignedProp(sSub.get(k).getURI(),tSub.get(t).getURI())>=PROPERTY_THRESHOLD){
sSub.remove(k);
tSub.remove(t);
k--;
t--;
break;
}
}
}
if(verbose){
System.out.println("Still to align: "+sSub.size());
}
if(sSub.size()==0) continue;
if(sSub.size()==1){
int row = getIndex(sourcePropList,sSub.get(0).getURI());
int col = getIndex(targetPropList,tSub.get(0).getURI());
Mapping m = new Mapping( sourcePropList.get(row), targetPropList.get(col), 1.0d);
m.setProvenance(SUBPROPERTIES);
propertiesMatrix.set( row, col, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sSub.get(0).getLocalName()+" "+
tSub.get(0).getLocalName()+" BY SUBPROPERTIES");
continue;
}
double[][] sims = new double[sSub.size()][sSub.size()];
for (int k = 0; k < sSub.size(); k++) {
for (int t = 0; t < tSub.size(); t++) {
sims[k][t] = rangeAndDomainSimilarity(sSub.get(k), tSub.get(t));
}
}
List<AlignIndexes> aligns = Utils.optimalAlignments(sims);
for (int k = 0; k < aligns.size(); k++) {
if( Core.DEBUG_FCM ) System.out.println(aligns.get(k).getX()+" "+aligns.get(k).getY());
int row = getIndex(sourcePropList,sSub.get(aligns.get(k).getX()).getURI());
int col = getIndex(targetPropList,tSub.get(aligns.get(k).getY()).getURI());
Mapping m = new Mapping( sourcePropList.get(row), targetPropList.get(col), 1.0d);
m.setProvenance(SUBPROPERTIES);
propertiesMatrix.set( row, col, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sSub.get(aligns.get(k).getX()).getLocalName()+" "
+tSub.get(aligns.get(k).getY()).getLocalName()+ " BY SUBPROPERTIES");
}
if(verbose){
Utils.printMatrix(sims);
}
}
}
}
return;
}
private double rangeAndDomainSimilarity(OntProperty sProp,
OntProperty tProp) {
if((sProp.getDomain()==null && tProp.getDomain()!=null) ||
(sProp.getDomain()!=null && tProp.getDomain()==null))
return 0.0;
if((sProp.getRange()==null && tProp.getRange()!=null) ||
(sProp.getRange()!=null && tProp.getRange()==null))
return 0.0;
if(sProp.isDatatypeProperty() && !tProp.isDatatypeProperty() ||
!sProp.isDatatypeProperty() && tProp.isDatatypeProperty())
return 0.0;
if(verbose){
System.out.println(sProp.getLocalName()+","+tProp.getLocalName());
}
double rangeSim = 0;
double domainSim = 0;
boolean unions = false;
if(sProp.getDomain()==null && tProp.getDomain()==null){
domainSim = 0.8;
}
else{
try {
domainSim = domainSimilarity(sProp.getDomain(),tProp.getDomain());
if(sProp.getDomain().asClass().isUnionClass() &&
tProp.getDomain().asClass().isUnionClass())
unions = true;
} catch( Exception e ) {
e.printStackTrace();
domainSim = 0.8;
}
}
if(sProp.getRange()!=null && tProp.getRange()!=null)
rangeSim = compareResources(sProp.getRange(), tProp.getRange());
if(sProp.getRange()==null && tProp.getRange()==null)
rangeSim = 0.8;
if(!unions && tProp.getRange()!=null && tProp.getRange().getURI() != null &&
Utils.primitiveType(tProp.getRange().getURI()))
rangeSim *= 0.75;
/*
else if(sProp.isObjectProperty() && tProp.isObjectProperty()){
domainSim = alignedClass(sProp.getRange().getURI(), tProp.getRange().getURI());
}*/
if(verbose){
System.out.println("rangesim: "+rangeSim+" domsim: "+domainSim);
}
double sim = (rangeSim+domainSim)/2;
return sim;
}
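	// --- Editorial example (not part of the original matcher) ---
	// Sketch of the arithmetic in rangeAndDomainSimilarity above for two datatype
	// properties whose domains are aligned with similarity 1.0 and whose ranges are
	// the same primitive XSD type: the primitive range is damped by 0.75 and the
	// final score is the plain average of range and domain similarity. Invented values.
	private static double rangeAndDomainSimilaritySketch() {
		double domainSim = 1.0;          // aligned domain classes
		double rangeSim = 1.0 * 0.75;    // identical primitive ranges carry less evidence
		return (rangeSim + domainSim) / 2; // 0.875
	}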
private void matchSubClasses() {
if(verbose){
System.out.println("MATCH SUBCLASSES");
}
ArrayList<OntClass> sSub;
ArrayList<OntClass> tSub;
for (int i = 0; i < sourceClassList.size(); i++) {
sSub = new ArrayList<OntClass>();
OntClass cl1 = (OntClass)sourceClassList.get(i).getResource().as(OntClass.class);
ExtendedIterator it1 = cl1.listSubClasses();
while(it1.hasNext()){
sSub.add((OntClass)it1.next());
}
for (int j = 0; j < targetClassList.size(); j++){
tSub = new ArrayList<OntClass>();
OntClass cl2 = (OntClass)targetClassList.get(j).getResource().as(OntClass.class);
ExtendedIterator it2 = cl2.listSubClasses();
while(it2.hasNext()){
tSub.add((OntClass)it2.next());
}
if(alignedClass(cl1.getURI(),cl2.getURI())>=CLASS_THRESHOLD &&
sSub.size()==tSub.size() && sSub.size()>0){
if(verbose){
System.out.println("size: "+sSub.size());
System.out.println("class1: "+cl1.getLocalName());
System.out.println(sSub);
System.out.println("class2: "+cl2.getLocalName());
System.out.println(tSub);
}
for (int k = 0; k < sSub.size(); k++) {
for (int t = 0; t < tSub.size(); t++) {
if(alignedClass(sSub.get(k).getURI(),tSub.get(t).getURI())>=CLASS_THRESHOLD){
sSub.remove(k);
tSub.remove(t);
k--;
t--;
break;
}
}
}
if(verbose){
System.out.println("Still to align: "+sSub.size());
}
if(sSub.size()==1){
int row = getIndex(sourceClassList,sSub.get(0).getURI());
int col = getIndex(targetClassList,tSub.get(0).getURI());
Mapping m = new Mapping( sourceClassList.get(row), targetClassList.get(col), 1.0d);
m.setProvenance(SUBCLASSES);
classesMatrix.set(row, col, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sSub.get(0)+" "+tSub.get(0)+" BY SUBCLASSES");
continue;
}
double[][] sims = new double[sSub.size()][sSub.size()];
for (int k = 0; k < sSub.size(); k++) {
for (int t = 0; t < tSub.size(); t++) {
if (verbose) System.out.println(sSub.get(k).getLocalName()+" "+tSub.get(t).getLocalName());
sims[k][t] = superclassesComparison(sSub.get(k), tSub.get(t));
}
}
if(verbose){
System.out.println("class1: "+cl1.getLocalName());
System.out.println("class2: "+cl2.getLocalName());
Utils.printMatrix(sims);
}
}
}
}
}
private void initHashMaps() {
sourceRestrictions = new HashMap<Node, List<Restriction>>();
targetRestrictions = new HashMap<Node, List<Restriction>>();
restrictions = new HashMap<Restriction, Node>();
for (int i = 0; i < sourcePropList.size(); i++) {
sourceRestrictions.put(sourcePropList.get(i),
getRestrictionsOnProperty(sourceClassList, sourcePropList.get(i)));
}
for (int i = 0; i < targetPropList.size(); i++) {
targetRestrictions.put(targetPropList.get(i),
getRestrictionsOnProperty(targetClassList, targetPropList.get(i)));
}
}
private void matchPropertyUsage() {
if(verbose){
System.out.println("MATCH PROPERTY USAGE");
}
Node sProp;
Node tProp;
List<Restriction> l1;
List<Restriction> l2;
ArrayList<Double> similarities = new ArrayList<Double>();
for (int i = 0; i < sourcePropList.size(); i++) {
sProp = sourcePropList.get(i);
l1 = sourceRestrictions.get(sProp);
//System.out.println("prop: "+sProp.getLocalName()+" size: "+l1.size());
similarities = new ArrayList<Double>();
for (int j = 0; j < targetPropList.size(); j++) {
tProp = targetPropList.get(j);
OntProperty sp = (OntProperty) sProp.getResource().as(OntProperty.class);
OntProperty tp = (OntProperty) tProp.getResource().as(OntProperty.class);
if((sp.isDatatypeProperty() && !tp.isDatatypeProperty())||
(!sp.isObjectProperty() && tp.isObjectProperty())){
similarities.add(0.0);
continue;
}
l2 = targetRestrictions.get(tProp);
if(l1.size()!=l2.size() || l1.size()==0){
similarities.add(0.0);
continue;
}
if(verbose){
System.out.println(sProp.getLocalName()+" "+tProp.getLocalName());
}
double[][] sims = new double[l1.size()][l1.size()];
for(int t=0; t<l1.size(); t++){
Restriction r1 = l1.get(t);
for(int k=0; k<l2.size(); k++){
Restriction r2 = l2.get(k);
sims[t][k] = restrictionUsageSimilarity(r1,r2);
}
}
//Obtain suboptimal solution
double usSim = Utils.optimalAlignment(sims);
similarities.add(usSim);
//Utils.printMatrix(sims);
if(verbose){
System.out.println("subSim: "+usSim);
}
}
//System.out.println(similarities);
int index = Utils.getOnlyMax(similarities);
if(verbose){
System.out.println("onlyMax: "+index);
}
if(index!=-1 && verbose) System.out.println(similarities.get(index));
if(index!=-1 && similarities.get(index)>parameters.getPropertyUsageThreshold()){
Mapping m = new Mapping( sProp, targetPropList.get(index), similarities.get(index));
m.setProvenance(PROPERTY_USAGE);
if(parameters.boostPropertyUsage) m.setSimilarity(1.0d);
propertiesMatrix.set(i, index, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sProp.getLocalName()+" "+targetPropList.get(index).getLocalName()+" BY PROPERTY USAGE");
}
}
}
private double restrictionUsageSimilarity(Restriction r1, Restriction r2) {
try {
double restrSim = restrictionSimilarity(r1, r2, false);
			// compare the classes owning the two restrictions (the original compared r1's owner with itself)
			double resSim = compareResources(restrictions.get(r1).getResource(), restrictions.get(r2).getResource());
return (2*restrSim+resSim)/3;
} catch( Exception e ) {
e.printStackTrace();
return 0d;
}
}
private List<Restriction> getRestrictionsOnProperty( List<Node> classList,
Node sProp) {
ArrayList<Restriction> restr = new ArrayList<Restriction>();
for(Node cl: classList){
OntClass ontClass = (OntClass)cl.getResource().as(OntClass.class);
try {
for(Object o: ontClass.listSuperClasses().toList()){
OntClass supClass = (OntClass) o;
if(supClass.isRestriction()){
Restriction r = supClass.asRestriction();
restrictions.put(r, cl);
if(r.getOnProperty().equals((OntProperty)sProp.getResource().as(OntProperty.class)))
restr.add(r);
}
}
} catch( Exception e ) {
e.printStackTrace();
}
}
return restr;
}
/* Find UnionClass types and match member classes
* @author Ulas
*/
private void matchUnionClasses(){
if(verbose) System.out.println("MATCH UNION");
ArrayList<UnionClass> unionClassesS = new ArrayList<UnionClass>();
ArrayList<UnionClass> unionClassesT = new ArrayList<UnionClass>();
ExtendedIterator<UnionClass> its = getSourceOntology().getModel().listUnionClasses();
ExtendedIterator<UnionClass> itt = getTargetOntology().getModel().listUnionClasses();
while(its.hasNext()){
UnionClass uc = its.next();
unionClassesS.add(uc);
}
while(itt.hasNext()){
UnionClass uc = itt.next();
unionClassesT.add(uc);
}
//System.out.println();
for(int k = 0; k < unionClassesS.size(); k++){
for(int m = 0; m < unionClassesT.size(); m++){
try {
matchUnionClassMember(unionClassesS.get(k), unionClassesT.get(m));
} catch( Exception e ) {
e.printStackTrace();
}
}
}
}
/* Matches member classes of two union classes
* @author Ulas
* @param UnionClass, UnionClass
*/
private void matchUnionClassMember(UnionClass a, UnionClass b){
ArrayList<OntClass> aList = new ArrayList<OntClass>();
for (ExtendedIterator<? extends OntClass> e = a.listOperands(); e.hasNext(); ) {
Resource r0 = (Resource) e.next();
OntClass unionMember = (OntClass) r0.as( OntClass.class );
aList.add(unionMember);
//System.out.print(" " + unionMember.getLocalName());
}
ArrayList<OntClass> bList = new ArrayList<OntClass>();
for (ExtendedIterator<? extends OntClass> e = b.listOperands(); e.hasNext(); ) {
Resource r0 = (Resource) e.next();
OntClass unionMember = (OntClass) r0.as( OntClass.class );
bList.add(unionMember);
//System.out.print(" " + unionMember.getLocalName());
}
if(aList.size() > 2 || bList.size() > 2){ return;}
boolean matchedS0 = false;
boolean matchedT0 = false;
boolean matchedS1 = false;
boolean matchedT1 = false;
for (int i = 0; i < sourceOntology.getClassesList().size(); i++) {
for (int j = 0; j < targetOntology.getClassesList().size(); j++) {
Mapping aln = null;
try{
aln = classesMatrix.get(i, j);
if(aln==null) continue;
Node currentNode = aln.getEntity1();
OntClass currentClassS = (OntClass) currentNode.getResource().as(OntClass.class);
if(currentClassS.equals(aList.get(0))){
matchedS0 = true;
//System.out.println("current: "+currentClassS);
Node n = classesMatrix.get(i, j).getEntity2();
double sims = classesMatrix.get(i,j).getSimilarity();
OntClass cT = (OntClass) n.getResource().as(OntClass.class);
if(verbose){
System.out.println(sourceClassList.get(i)+" "+targetClassList.get(j));
System.out.println("sims:"+sims);
System.out.println("cT:"+cT);
System.out.println("aList:"+aList);
System.out.println("bList:"+bList);
System.out.println("eq:"+cT.equals(bList.get(0))+" sims:"+sims);
}
if(cT.equals(bList.get(0)) && sims > 0.8){
//Align 1 and 1 Here
//int index1 = findSourceIndex(aList.get(0));
int i1 = getIndex(sourceClassList,aList.get(1).getURI());
int i2 = getIndex(targetClassList,bList.get(1).getURI());
if(i1==-1 || i2==-1) continue;
double sim1 = classesMatrix.getRowMaxValues(i1, 1)[0].getSimilarity();
double sim2 = classesMatrix.getColMaxValues(i2, 1)[0].getSimilarity();
if(verbose){
System.out.println(aList.get(0).getLocalName()+" "+bList.get(1).getLocalName());
System.out.println(sourceClassList.get(i)+" "+targetClassList.get(j));
System.out.println("sim1:"+sim1+" sim2:"+sim2);
}
if(sim1 < 0.6d && sim2 < 0.6d){
if(Core.DEBUG_FCM) System.out.println("ALIGNMENT:"+aList.get(1)+" "+bList.get(1)+" BY ULAS");
// classesMatrix.set(findSourceIndex(aList.get(1)), findTargetIndex(bList.get(1)),
// new Mapping(findSourceNode(aList.get(1)), findTargetNode(bList.get(1)), 1.0d));
int c1 = findSourceIndex(aList.get(1));
int c2 = findTargetIndex(bList.get(1));
if(i1==-1 || i2==-1) continue;
Mapping m = new Mapping(sourceClassList.get(c1), targetClassList.get(c2), 1.0d);
m.setProvenance(UNION_CLASSES);
classesMatrix.set(c1, c2, m);
if(verbose) System.out.println("ALIGNMENT:"+aList.get(1)+" "+bList.get(1)+" BY ULAS1");
}
}
else{
if(cT.equals(bList.get(1)) && sims > 0.8){
//Align 1 and 0 here
int i1 = getIndex(sourceClassList,aList.get(1).getURI());
int i2 = getIndex(targetClassList,bList.get(0).getURI());
if(i1==-1 || i2==-1) continue;
double sim1 = classesMatrix.getRowMaxValues(i1, 1)[0].getSimilarity();
double sim2 = classesMatrix.getColMaxValues(i2, 1)[0].getSimilarity();
if(sim1 < 0.6 && sim2 < 0.6d){
int c1 = findSourceIndex(aList.get(1));
int c2 = findTargetIndex(bList.get(0));
Mapping m = new Mapping(sourceClassList.get(c1), targetClassList.get(c2), 1.0d);
m.setProvenance(UNION_CLASSES);
classesMatrix.set(c1, c2, m);
if(verbose) System.out.println("ALIGNMENT:"+aList.get(1)+" "+bList.get(0)+" BY ULAS2");
}
}
}
}
else if(currentClassS.equals(aList.get(1))){
matchedS1 = true;
Node n = classesMatrix.get(i, j).getEntity2();
double sims = classesMatrix.get(i,j).getSimilarity();
OntClass cT = (OntClass) n.getResource().as(OntClass.class);
if(cT.equals(bList.get(0)) && sims > 0.8){
//Align 0 and 1 Here
double sim1 = classesMatrix.getRowMaxValues(findSourceIndex(aList.get(0)), 1)[0].getSimilarity();
double sim2 = classesMatrix.getColMaxValues(findTargetIndex(bList.get(1)), 1)[0].getSimilarity();
if(sim1 < 0.6d && sim2 < 0.6d){
int c1 = findSourceIndex(aList.get(0));
int c2 = findTargetIndex(bList.get(1));
Mapping m = new Mapping(sourceClassList.get(c1), targetClassList.get(c2), 1.0d);
m.setProvenance(UNION_CLASSES);
classesMatrix.set(c1, c2, m);
System.out.println("ALIGNMENT:"+aList.get(0)+" "+bList.get(1)+" BY ULAS3");
//System.out.println("A");
// classesMatrix.set(findSourceIndex(aList.get(0)), findTargetIndex(bList.get(1)),
// new Mapping(findSourceNode(aList.get(0)), findTargetNode(bList.get(1)), 1.0d));
// System.out.println();
}
}
else{
if(cT.equals(bList.get(1)) && sims > 0.8){
//Align 0 and 0 here
double sim1 = classesMatrix.getRowMaxValues(findSourceIndex(aList.get(0)), 1)[0].getSimilarity();
double sim2 = classesMatrix.getColMaxValues(findTargetIndex(bList.get(0)), 1)[0].getSimilarity();
if(sim1 < 0.6d && sim2 < 0.6d){
int c1 = findSourceIndex(aList.get(0));
int c2 = findTargetIndex(bList.get(0));
Mapping m = new Mapping(sourceClassList.get(c1), targetClassList.get(c2), 1.0d);
m.setProvenance(UNION_CLASSES);
classesMatrix.set(c1, c2, m);
System.out.println("ALIGNMENT:"+aList.get(0)+" "+bList.get(0)+" BY ULAS2");
// System.out.println("B");
// classesMatrix.set(findSourceIndex(aList.get(0)), findTargetIndex(bList.get(0)),
// new Mapping(findSourceNode(aList.get(0)), findTargetNode(bList.get(0)), 1.0d));
// System.out.println();
}
}
}
}
}
catch(Exception e){
//e.printStackTrace();
}
}
}
}
/* Finds index of a source class in the matrix
* @author Ulas
* @param OntClass
* @return int index
*/
private int findSourceIndex(OntClass c){
// Mapping aln = null;
// for(int i = 0; i < sourceOntology.getClassesList().size(); i++) {
// try{
// aln = classesMatrix.get(i, 0);
// Node currentNode = aln.getEntity1();
// OntClass currentClassS = (OntClass) currentNode.getResource().as(OntClass.class);
// if(c.equals(currentClassS)){
// return i;
// }
// }
// catch(Exception e){
// }
// }
// return -1;
return getIndex(sourceClassList, c.getURI());
}
/* Find Node type of a source class in the matrix
* @author Ulas
* @param OntClass
* @return Node represents the OntClass
*/
private Node findSourceNode(OntClass c){
Mapping aln = null;
for(int i = 0; i < sourceOntology.getClassesList().size(); i++) {
try{
aln = classesMatrix.get(i, 0);
Node currentNode = aln.getEntity1();
OntClass currentClassS = (OntClass) currentNode.getResource().as(OntClass.class);
if(c.equals(currentClassS)){
return currentNode;
}
}
catch(Exception e){
}
}
return null;
}
/* Finds index of a target class in the matrix
* @author Ulas
* @param OntClass
* @return int index
*/
private int findTargetIndex(OntClass c){
// Mapping aln = null;
// for(int i = 0; i < targetOntology.getClassesList().size(); i++) {
// try{
// aln = classesMatrix.get(0, i);
// Node currentNode = aln.getEntity2();
// OntClass currentClassS = (OntClass) currentNode.getResource().as(OntClass.class);
// if(c.equals(currentClassS)){
// return i;
// }
// }
// catch(Exception e){
// }
// }
// return -1;
return getIndex(targetClassList, c.getURI());
}
/* Find Node type of a target class in the matrix
* @author Ulas
* @param OntClass
* @return Node represents the OntClass
*/
private Node findTargetNode(OntClass c){
Mapping aln = null;
for(int i = 0; i < targetOntology.getClassesList().size(); i++) {
try{
aln = classesMatrix.get(0, i);
Node currentNode = aln.getEntity2();
OntClass currentClassT = (OntClass) currentNode.getResource().as(OntClass.class);
if(c.equals(currentClassT)){
return currentNode;
}
}
catch(Exception e){
}
}
return null;
}
private void matchSuperclasses() {
ArrayList<Double> similarities = new ArrayList<Double>();
//Match classes based on Superclasses and types
for (int i = 0; i<sourceOntology.getClassesList().size(); i++) {
Node source = sourceOntology.getClassesList().get(i);
//You can print something once per class
similarities = new ArrayList<Double>();
double sim;
for (int j = 0; j<targetOntology.getClassesList().size(); j++) {
Node target = targetOntology.getClassesList().get(j);
sim = superclassesComparison(source,target);
classSimilarities[i][j].setSuperclasses(sim);
similarities.add(sim);
}
if(verbose) System.out.println(similarities);
int index = Utils.getOnlyMax(similarities);
if(verbose){
System.out.println("onlyMax: "+index);
if(index!=-1) System.out.println(similarities.get(index));
}
if(index!=-1 && similarities.get(index)>=parameters.getSuperclassThreshold()){
Mapping m = new Mapping(source, targetClassList.get(index), similarities.get(index));
m.setProvenance(SUBCLASSOF);
if(parameters.boostSubclassOf) m.setSimilarity(1.0d);
classesMatrix.set(i, index, m);
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+source.getLocalName()+" "
+targetClassList.get(index).getLocalName()+" BY SUBCLASSOF");
}
verbose = false;
}
}
private double superclassesComparison(OntClass sClass, OntClass tClass) {
double subSim = 0.0d;
try {
List<OntClass> l1 = sClass.listSuperClasses().toList();
List<OntClass> l2 = tClass.listSuperClasses().toList();
if(l1.size()!=l2.size() || l1.size()==0) return 0.0d;
double[][] sims = new double[l1.size()][l1.size()];
for(int i=0; i<l1.size(); i++){
OntClass c1 = (OntClass) l1.get(i);
for(int j=0; j<l2.size(); j++){
OntClass c2 = (OntClass) l2.get(j);
sims[i][j] = superClassSimilarity(c1,c2);
}
}
//Obtain best matching solution
subSim = Utils.optimalAlignment(sims);
if(verbose){
Utils.printMatrix(sims);
System.out.println("subSim: "+subSim);
}
} catch( Exception e ) {
e.printStackTrace();
return 0.0d;
}
return subSim;
}
private double superclassesComparison(Node source, Node target) {
if(verbose)
System.out.println("SuperClassesComp: "+source.getLocalName()+","+target.getLocalName());
OntClass sClass = null;
OntClass tClass = null;
if(!source.getResource().canAs(OntClass.class) || !target.getResource().canAs(OntClass.class))
return 0.0;
		// use the Jena polymorphism API rather than a raw cast; canAs() does not
		// guarantee that the returned Resource implementation is already an OntClass
		sClass = source.getResource().as(OntClass.class);
		tClass = target.getResource().as(OntClass.class);
return superclassesComparison(sClass, tClass);
}
private double superClassSimilarity(OntClass c1, OntClass c2) {
//System.out.println(c1+" "+c2);
if(c1.isRestriction() && c2.isRestriction()){
//System.out.println("RESTR");
try {
return restrictionSimilarity((Restriction)c1.as(Restriction.class),
(Restriction)c2.as(Restriction.class),true);
} catch( Exception e ) {
e.printStackTrace();
return 0d;
}
}
if(c1.getURI()!=null && c2.getURI()!=null){
//System.out.println("ALIGN");
return alignedClass(c1.getURI(),c2.getURI());
}
return 0.0;
}
private double restrictionSimilarity(Restriction r1, Restriction r2,boolean classes) throws Exception {
double sim = 0;
double onProp = 0;
try {
if(classes)
onProp = alignedProp(r1.getOnProperty().getURI(), r2.getOnProperty().getURI());
} catch( Exception e ) {
e.printStackTrace();
}
if(r1.isMaxCardinalityRestriction() && r2.isMaxCardinalityRestriction()){
MaxCardinalityRestriction m1 = r1.asMaxCardinalityRestriction();
			MaxCardinalityRestriction m2 = r2.asMaxCardinalityRestriction(); // was r1, which compared the restriction with itself
if(m1.getMaxCardinality()==m2.getMaxCardinality())
sim++;
}
else if(r1.isMinCardinalityRestriction() && r2.isMinCardinalityRestriction()){
MinCardinalityRestriction m1 = r1.asMinCardinalityRestriction();
			MinCardinalityRestriction m2 = r2.asMinCardinalityRestriction(); // was r1, which compared the restriction with itself
if(m1.getMinCardinality()==m2.getMinCardinality())
sim++;
}
else if(r1.isCardinalityRestriction() && r2.isCardinalityRestriction()){
try {
CardinalityRestriction c1 = r1.asCardinalityRestriction();
CardinalityRestriction c2 = r2.asCardinalityRestriction();
if(c1.getCardinality()==c2.getCardinality())
sim++;
} catch( Exception e ) {
e.printStackTrace();
}
}
else if(r1.isAllValuesFromRestriction() && r2.isAllValuesFromRestriction()){
AllValuesFromRestriction a1 = r1.asAllValuesFromRestriction();
AllValuesFromRestriction a2 = r2.asAllValuesFromRestriction();
double resSim = compareResources(a1.getAllValuesFrom(),a2.getAllValuesFrom());
if(resSim==1 && Utils.primitiveType(a1.getAllValuesFrom().getURI()))
resSim = 0.75;
sim += resSim;
}
if(classes)
return (sim*3+onProp)/4;
else return sim;
}
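	// --- Editorial example (not part of the original matcher) ---
	// Sketch of the weighting used by restrictionSimilarity above when it is called
	// for classes: agreement between the two restrictions counts three times as much
	// as the similarity of the restricted properties. Invented values.
	private static double restrictionSimilaritySketch() {
		double sim = 1.0;    // e.g. both are cardinality restrictions with the same cardinality
		double onProp = 0.8; // similarity of the two onProperty values
		return (sim * 3 + onProp) / 4; // 0.95
	}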
private void matchRangeAndDomain() {
for (int i = 0; i < sourcePropList.size(); i++) {
for (int j = 0; j < targetPropList.size(); j++){
double sim = rangeAndDomainSimilarity(sourcePropList.get(i),targetPropList.get(j));
propSimilarities[i][j].setRangeAndDomain(sim);
if(sim>=parameters.getRangeDomainThreshold()){
if( Core.DEBUG_FCM ) System.out.println("ALIGNMENT:"+sourcePropList.get(i).getLocalName()+" "
+targetPropList.get(j).getLocalName()+" BY RANGE/DOMAIN");
Mapping m = new Mapping(sourcePropList.get(i),targetPropList.get(j), sim);
m.setProvenance(RANGE_DOMAIN);
if(parameters.boostRangeDomain) m.setSimilarity(1.0d);
propertiesMatrix.set(i,j, m);
}
}
}
return;
}
private double rangeAndDomainSimilarity(Node source, Node target) {
if(!source.getResource().canAs(OntProperty.class) ||
!target.getResource().canAs(OntProperty.class))
return 0.0;
OntProperty sProp = (OntProperty) source.getResource().as(OntProperty.class);
OntProperty tProp = (OntProperty) target.getResource().as(OntProperty.class);
return rangeAndDomainSimilarity(sProp, tProp);
}
private double domainSimilarity(OntResource sDom, OntResource tDom) {
if(sDom.canAs(OntClass.class) && tDom.canAs(OntClass.class)){
OntClass c1 = sDom.asClass();
OntClass c2 = tDom.asClass();
if(!c1.isUnionClass() || !c2.isUnionClass()){
if(sDom.getURI()!=null && tDom.getURI()!=null){
if(sDom.getURI().equals(tDom.getURI()))
return 1;
else return alignedClass(sDom.getURI(), tDom.getURI());
}
}
else{
//BOTH UNION CLASSES
if(verbose){
System.out.println("Both union!!");
System.out.println(c1.getLocalName()+", "+c2.getLocalName());
}
UnionClass u1 = c1.asUnionClass();
UnionClass u2 = c2.asUnionClass();
List<? extends OntClass> l1 = u1.listOperands().toList();
List<? extends OntClass> l2 = u2.listOperands().toList();
if(l1.size() != l2.size() || l1.size()==0)
return 0.0;
double[][] sims = new double[l1.size()][l1.size()];
for(int i=0; i<l1.size(); i++){
OntResource r1 = (OntResource)l1.get(i);
for(int j=0; j<l2.size(); j++){
OntResource r2 = (OntResource) l2.get(j);
sims[i][j] = compareResources(r1, r2);
}
}
if(verbose){
System.out.println("UNION COMP:");
Utils.printMatrix(sims);
}
//Obtain suboptimal solution
double unionSim = Utils.optimalAlignment(sims);
if(verbose){
System.out.println("unionSim: "+unionSim);
}
if(unionSim>0) unionSim += 0.3;
return unionSim;
}
}
return 0;
}
public static double individualsComparison(List<Individual> sList, List<Individual> tList){
//Look at individuals
if(sList.size()==0 || tList.size()==0) return 0;
Individual sInd;
Individual tInd;
int count = 0;
for (int i = 0; i < sList.size(); i++) {
for (int j = 0; j < tList.size(); j++) {
sInd = sList.get(i);
tInd = tList.get(j);
if(!sInd.isAnon() && !tInd.isAnon()){
if(sInd.getLocalName().equals(tInd.getLocalName())){
count++;
}
}
}
}
		// floating-point division; the original integer division truncated the score
		return 2.0 * count / (sList.size() + tList.size());
}
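	// --- Editorial example (not part of the original matcher) ---
	// Sketch of the Dice-style overlap returned by individualsComparison above:
	// twice the number of shared individual local names over the total number of
	// individuals in both classes. The 2.0 numerator keeps the division in floating
	// point. Invented values.
	private static double individualOverlapSketch() {
		int shared = 1;      // e.g. only one local name appears under both classes
		int sourceSize = 2;
		int targetSize = 3;
		return 2.0 * shared / (sourceSize + targetSize); // 0.4
	}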
public double alignedClass(String sURI,String tURI){
int s = -1;
int t = -1;
for (int i = 0; i < sourceClassList.size(); i++) {
if(sourceClassList.get(i).getUri().equals(sURI))
s = i;
}
if(s==-1) return 0.0;
for (int i = 0; i < targetClassList.size(); i++) {
if(targetClassList.get(i).getUri().equals(tURI))
t = i;
}
if(t==-1) return 0.0;
return classesMatrix.getSimilarity(s, t);
}
public double alignedProp(String sURI,String tURI){
int s = -1;
int t = -1;
for (int i = 0; i < sourcePropList.size(); i++) {
if(sourcePropList.get(i).getUri().equals(sURI))
s = i;
}
if(s==-1) return 0.0;
for (int i = 0; i < targetPropList.size(); i++) {
if(targetPropList.get(i).getUri().equals(tURI))
t = i;
}
if(t==-1) return 0.0;
return propertiesMatrix.getSimilarity(s, t);
}
private int getIndex( List<Node> list, String uri) {
for (int i = 0; i < list.size(); i++) {
if(list.get(i).getUri().equals(uri))
return i;
}
return -1;
}
private double compareResources(Resource r1, Resource r2){
String uri1 = r1.getURI();
String uri2 = r2.getURI();
if(uri1==null || uri2==null) return 0.0;
if(uri1.equals(uri2))
return 1.0;
double simClass = alignedClass(uri1,uri2);
double simProp = alignedProp(uri1,uri2);
if(simClass > simProp)
return simClass;
else return simProp;
}
private double getNumberOfClassAlignments() {
double[][] matrix = classesMatrix.getCopiedSimilarityMatrix();
double sum = 0;
for (int i = 0; i < matrix.length; i++) {
for (int j = 0; j < matrix[0].length; j++) {
sum += matrix[i][j];
}
}
return sum;
}
private double getNumberOfPropAlignments() {
double[][] matrix = propertiesMatrix.getCopiedSimilarityMatrix();
double sum = 0;
for (int i = 0; i < matrix.length; i++) {
for (int j = 0; j < matrix[0].length; j++) {
sum += matrix[i][j];
}
}
return sum;
}
	// Debugging helper: every print statement in this method is commented out
	// upstream, so it is currently a no-op; kept to preserve the original structure.
	private void printPropValues() {
Iterator<OntProperty> it = sourcePropValues.keySet().iterator();
while(it.hasNext()){
OntProperty prop = it.next();
//System.out.println(prop);
//System.out.println(sourcePropValues.get(prop));
}
//System.out.println("TARGET");
//System.out.println(targetPropValues);
Iterator<OntProperty> it2 = targetPropValues.keySet().iterator();
while(it2.hasNext()){
OntProperty prop = it2.next();
//System.out.println(prop);
//System.out.println(targetPropValues.get(prop));
}
}
/**
* Input must be a Node representing a class. (i.e. Node.isClass() == true)
* @param currentNode Node object representing a class.
	 * @return List of Individual objects representing the instances of the class.
*/
public ArrayList<Individual> getIndividuals( Node currentNode ) {
ArrayList<Individual> individualsList = new ArrayList<Individual>();
OntClass currentClass = (OntClass) currentNode.getResource().as(OntClass.class);
ExtendedIterator indiIter = currentClass.listInstances(true);
while( indiIter.hasNext() ) {
Individual ci = (Individual) indiIter.next();
//if( ci.isAnon() ) System.out.println("\n************************\nProperties of individual:" + ci.getId() );
//else System.out.println("\n************************\nProperties of individual:" + ci.getLocalName() );
StmtIterator indiPropertiesIter = ci.listProperties();
while( indiPropertiesIter.hasNext() ) {
Statement currentProperty = indiPropertiesIter.nextStatement();
//System.out.println(currentProperty);
}
individualsList.add( ci );
}
// try to deal with improperly declared individuals. (from the 202 scrambled ontology)
if( individualsList.isEmpty() ) {
OntModel mod = (OntModel) currentClass.getModel();
List<Statement> ls = mod.listStatements(null , mod.getProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), mod.getResource(currentClass.getLocalName())).toList();
Iterator<Statement> lsiter = ls.iterator();
int k = 1;
while( lsiter.hasNext() ) {
Statement s = lsiter.next();
Resource r = s.getSubject();
if( r.canAs(Individual.class) ) {
Individual indi = r.as(Individual.class);
Individual ci = indi;
//if( ci.isAnon() ) System.out.println("\n************************\nProperties of individual:" + ci.getId() );
//else System.out.println("\n************************\nProperties of individual:" + ci.getLocalName() );
StmtIterator indiPropertiesIter = ci.listProperties();
while( indiPropertiesIter.hasNext() ) {
Statement currentProperty = indiPropertiesIter.nextStatement();
RDFNode currentnode = currentProperty.getObject();
if( currentnode.canAs(Literal.class) ) {
Literal currentLiteral = (Literal) currentnode.as(Literal.class);
currentLiteral.getString();
}
//System.out.println(currentProperty);
}
individualsList.add(indi);
}
}
}
return individualsList;
}
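	// --- Editorial example (not part of the original matcher) ---
	// Sketch of the fallback used in getIndividuals above for individuals that were
	// typed only through a plain rdf:type statement with a local class name: list
	// (?s, rdf:type, <localName>) statements and keep subjects viewable as
	// Individuals. The method name is invented; the Jena calls mirror the ones above.
	private List<Individual> listLooselyTypedIndividualsSketch(OntClass cls) {
		List<Individual> result = new ArrayList<Individual>();
		OntModel model = (OntModel) cls.getModel();
		List<Statement> stmts = model.listStatements(null,
				model.getProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
				model.getResource(cls.getLocalName())).toList();
		for (Statement s : stmts) {
			if (s.getSubject().canAs(Individual.class)) {
				result.add(s.getSubject().as(Individual.class));
			}
		}
		return result;
	}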
private boolean matchIndividuals(Node source, Node target) {
boolean classMatched = false;
boolean individualMatched = false;
ArrayList<Individual> sourceIndi = getIndividuals(source);
ArrayList<Individual> targetIndi = getIndividuals(target);
for (Individual iSource: sourceIndi){
for (Individual iTarget: targetIndi) {
if (!iSource.isAnon() && !iTarget.isAnon()) { //if neither is anonymous
if (iSource.getLocalName().equals(iTarget.getLocalName())) {
classMatched = true;
individualMatched = recursiveMatchIndividuals(iSource, iTarget);
}
}
else if (iSource.isAnon() && iTarget.isAnon()) { //both anonnymous
individualMatched = recursiveMatchIndividuals(iSource, iTarget); //prop?
}
if (individualMatched) classMatched = true;
}
}
return classMatched;
}
private boolean recursiveMatchIndividuals(Individual iSource, Individual iTarget) {
boolean IndividualsMatched = false;
boolean propertyMatched = false;
List<Statement> sourceProperties = iSource.listProperties().toList();
List<Statement> targetProperties = iTarget.listProperties().toList();
// for (Statement s: sourceProperties) System.out.println(s);
for(int i=0;i<sourceProperties.size();i++){
for(int j=0;j<targetProperties.size();j++){
Statement sourceProperty = sourceProperties.get(i);
Statement targetProperty = targetProperties.get(j);
propertyMatched = false;
if (sourceProperty.getObject().isAnon() && targetProperty.getObject().isAnon()) {
// RDFNode subject = sourceProperty.getSubject();
// RDFNode object = sourceProperty.getObject();
// RDFNode prop = sourceProperty.getPredicate();
propertyMatched = recursiveMatchIndividuals((Individual)(sourceProperty.getObject().as(Individual.class)),
(Individual)(targetProperty.getObject().as(Individual.class)));
}
else {
Triple sourcePropTriple = (sourceProperty).asTriple();
Triple targetPropTriple = (targetProperty).asTriple();
//System.out.println("s:"+sourcePropTriple);
//System.out.println("t:"+targetPropTriple);
if(sourcePropTriple.getObject().equals(targetPropTriple.getObject())){
//System.out.println("EQUALS");
//System.out.println(sourcePropTriple.getPredicate().getURI());
String uri1 = sourcePropTriple.getPredicate().getURI();
String uri2 = targetPropTriple.getPredicate().getURI();
						// crude relative-URI check: predicates serialized without a full URI
						// are resolved against the target ontology's namespace
						if(uri2.length()<20)
							uri2 = targetOntology.getURI() + uri2;
//System.out.println(uri2);
Node source = get(sourcePropList, uri1);
//System.out.println(source);
Node target = get(targetPropList, uri2);
//System.out.println(target);
if (source != null && target != null) {
Mapping m = new Mapping(source, target, 1.0d);
m.setProvenance(RECURSIVE_INDIVIDUALS);
propertiesMatrix.set(sourcePropList.indexOf(source), targetPropList.indexOf(target), m);
//propertiesMatrix.setSimilarity(sourcePropList.indexOf(source), targetPropList.indexOf(target), 1.0);
}
propertyMatched = true;
}
}
if (propertyMatched) IndividualsMatched = true;
}
}
return IndividualsMatched;
}
private Node get( List<Node> nodeList, String uri) {
int ind = getIndex(nodeList, uri);
if(ind!=-1)
return nodeList.get(ind);
return null;
}
public ClassSimilarity getClassSimilarity(Mapping mapping){
if(mapping.getAlignmentType().equals(alignType.aligningProperties)) return null;
return classSimilarities[mapping.getSourceKey()][mapping.getTargetKey()];
}
public PropertySimilarity getPropertySimilarity(Mapping mapping){
if(mapping.getAlignmentType().equals(alignType.aligningClasses)) return null;
return propSimilarities[mapping.getSourceKey()][mapping.getTargetKey()];
}
public void setUseIndividuals(boolean useIndividuals){
individuals = useIndividuals;
}
@Override
public AbstractMatcherParametersPanel getParametersPanel(){
if(parametersPanel == null){
parametersPanel = new IterativeInstanceStructuralParametersPanel();
}
return parametersPanel;
}
public String allSimilarities(Alignment<Mapping> mappings){
String ret = "Source\tTarget\tSimilarity\tSyntactic\tRestrictions\tSuperclasses\tSubclasses\tProvenance\n";
for(Mapping mapping: mappings){
if(mapping.getAlignmentType() == alignType.aligningClasses)
ret += classMappingTuple(mapping) + "\n";
}
ret += "Source\tTarget\tSimilarity\tSyntactic\tRangeDomain\tValues\tSubproperties\tProvenance\n";
for(Mapping mapping: mappings){
if(mapping.getAlignmentType() == alignType.aligningProperties)
ret += propertyMappingTuple(mapping) + "\n";
}
return ret;
}
public String allSimilarities(alignType type, boolean onlyMappings){
if(classesAlignmentSet==null || propertiesAlignmentSet==null)
return null;
if(type == alignType.aligningClasses){
String ret = "Source\tTarget\tSimilarity\tSyntactic\tRestrictions\tSuperclasses\tSubclasses\tProvenance\n";
for(Mapping mapping: classesAlignmentSet){
ret += classMappingTuple(mapping) + "\n";
}
return ret;
}
else if(type == alignType.aligningProperties){
String ret = "Source\tTarget\tSimilarity\tSyntactic\tRangeDomain\tValues\tSubproperties\tProvenance\n";
PropertySimilarity sim = null;
for(Mapping mapping: propertiesAlignmentSet){
ret += propertyMappingTuple(mapping) + "\n";
}
return ret;
}
return null;
}
public String classMappingTuple(Mapping mapping){
String ret = mapping.getEntity1().getLocalName() + "\t" + mapping.getEntity2().getLocalName() +
"\t" + mapping.getSimilarity();
ClassSimilarity sim = getClassSimilarity(mapping);
ret += "\t" + sim.getSyntactic() + "\t" + sim.getRestrictions() + "\t" + sim.getSuperclasses()
+ "\t" + sim.getSubclasses();
if(mapping.getProvenance()!=null) ret += "\t" + mapping.getProvenance();
else ret += "\t" + SYNTACTIC;
return ret;
}
public String propertyMappingTuple(Mapping mapping){
String ret = mapping.getEntity1().getLocalName() + "\t" + mapping.getEntity2().getLocalName() +
"\t" + mapping.getSimilarity();
PropertySimilarity sim = getPropertySimilarity(mapping);
ret += "\t" + sim.getSyntactic() + "\t" + sim.getRangeAndDomain() + "\t" + sim.getValues() +
"\t" + sim.getSubProperties();
if(mapping.getProvenance()!=null) ret += "\t" + mapping.getProvenance();
else ret += "\t" + SYNTACTIC;
return ret;
}
private void printAllSimilarities() {
for (int i = 0; i < classSimilarities.length; i++) {
for (int j = 0; j < classSimilarities[0].length; j++) {
System.out.println(sourceClassList.get(i)+" "+targetClassList.get(j)
+classesMatrix.getSimilarity(i, j));
System.out.println(classSimilarities[i][j]);
}
}
for (int i = 0; i < propSimilarities.length; i++) {
for (int j = 0; j < propSimilarities[0].length; j++) {
System.out.println(sourcePropList.get(i)+" "+targetPropList.get(j)
+ " " + propertiesMatrix.getSimilarity(i, j));
System.out.println(propSimilarities[i][j]);
}
}
}
} | Set name and category.
| AgreementMaker-Matchers/IterativeInstanceStructuralMatcher/src/iterativeinstancestructuralmatcher/internal/IterativeInstanceStructuralMatcher.java | Set name and category. |
|
Java | agpl-3.0 | e733bdc881c5fc2b93f6c7f485244e9b08002e49 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 35e9af32-2e62-11e5-9284-b827eb9e62be | hello.java | 35e4447a-2e62-11e5-9284-b827eb9e62be | 35e9af32-2e62-11e5-9284-b827eb9e62be | hello.java | 35e9af32-2e62-11e5-9284-b827eb9e62be |
|
Java | lgpl-2.1 | 6cc81c6196e1a5f097e0aa146970a27f7ba258d8 | 0 | mmusgrov/narayana,tomjenkinson/narayana,tomjenkinson/narayana,gytis/narayana,mmusgrov/narayana,jbosstm/narayana,gytis/narayana,tomjenkinson/narayana,mmusgrov/narayana,gytis/narayana,jbosstm/narayana,jbosstm/narayana,tomjenkinson/narayana,gytis/narayana,gytis/narayana,gytis/narayana,jbosstm/narayana,mmusgrov/narayana | /*
* Copyright 2013, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* (C) 2013
* @author JBoss Inc.
*/
package com.arjuna.ats.internal.jta.resources.arjunacore;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Enumeration;
import java.util.Map;
import java.util.Vector;
import javax.transaction.RollbackException;
import javax.transaction.Status;
import javax.transaction.Synchronization;
import javax.transaction.SystemException;
import javax.transaction.xa.XAException;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
import com.arjuna.ats.internal.jta.resources.XAResourceErrorHandler;
import com.arjuna.ats.internal.jta.transaction.arjunacore.TransactionSynchronizationRegistryImple;
import org.jboss.tm.ConnectableResource;
import org.jboss.tm.XAResourceWrapper;
import com.arjuna.ats.arjuna.ObjectType;
import com.arjuna.ats.arjuna.common.Uid;
import com.arjuna.ats.arjuna.coordinator.AbstractRecord;
import com.arjuna.ats.arjuna.coordinator.BasicAction;
import com.arjuna.ats.arjuna.coordinator.RecordType;
import com.arjuna.ats.arjuna.coordinator.TwoPhaseOutcome;
import com.arjuna.ats.arjuna.coordinator.TxControl;
import com.arjuna.ats.arjuna.logging.tsLogger;
import com.arjuna.ats.arjuna.recovery.RecoveryManager;
import com.arjuna.ats.arjuna.recovery.RecoveryModule;
import com.arjuna.ats.arjuna.state.InputObjectState;
import com.arjuna.ats.arjuna.state.OutputObjectState;
import com.arjuna.ats.internal.jta.recovery.arjunacore.CommitMarkableResourceRecordRecoveryModule;
import com.arjuna.ats.internal.jta.transaction.arjunacore.TransactionImple;
import com.arjuna.ats.internal.jta.xa.XID;
import com.arjuna.ats.jta.common.JTAEnvironmentBean;
import com.arjuna.ats.jta.logging.jtaLogger;
import com.arjuna.ats.jta.utils.XAHelper;
import com.arjuna.ats.jta.xa.XidImple;
import com.arjuna.common.internal.util.propertyservice.BeanPopulator;
/**
* The CommitMarkableResourceRecord does not support nested transactions
*
 * If the database that a CommitMarkableResourceRecord is linked to is down
 * forever, this has the side effect that the associated RecoverAtomicAction is
 * never expired.
*
* The CommitMarkableResourceRecord assumes the following table has been
* created:
*
* syb:
*
* create table xids (xid varbinary(144), transactionManagerID varchar(64),
* actionuid varbinary(28))
*
* ora:
*
* create table xids (xid RAW(144), transactionManagerID varchar(64), actionuid
* RAW(28))
*
* psql:
*
* create table xids (xid bytea, transactionManagerID varchar(64), actionuid
* bytea)
*
* h2:
*
* create table xids (xid varbinary(144), transactionManagerID varchar(64),
* actionuid varbinary(28))
*
* sybase notes: sp_configure "lock scheme",0,datarows
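 *
 * The table name can be overridden per datasource through the JTAEnvironmentBean
 * commitMarkableResourceTableNameMap (keyed by JNDI name); otherwise the default
 * commit markable table name is used. Illustrative sketch only, assuming the
 * matching setter exists on JTAEnvironmentBean and using a made-up JNDI name:
 *
 * Map<String, String> tableNames = new HashMap<String, String>();
 * tableNames.put("java:jboss/datasources/MyCMRDatasource", "myapp_xids");
 * BeanPopulator.getDefaultInstance(JTAEnvironmentBean.class)
 *     .setCommitMarkableResourceTableNameMap(tableNames);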
*/
public class CommitMarkableResourceRecord extends AbstractRecord {
private final String tableName;
private Xid xid;
private ConnectableResource connectableResource;
private boolean onePhase = false;
private String commitMarkableJndiName;
private boolean committed;
private BasicAction basicAction;
private String productName;
private String productVersion;
private boolean hasCompleted;
private static CommitMarkableResourceRecordRecoveryModule commitMarkableResourceRecoveryModule;
private static final JTAEnvironmentBean jtaEnvironmentBean = BeanPopulator
.getDefaultInstance(JTAEnvironmentBean.class);
private static final Map<String, String> commitMarkableResourceTableNameMap = jtaEnvironmentBean
.getCommitMarkableResourceTableNameMap();
private static final String defaultTableName = jtaEnvironmentBean
.getDefaultCommitMarkableTableName();
private boolean isPerformImmediateCleanupOfBranches = jtaEnvironmentBean
.isPerformImmediateCleanupOfCommitMarkableResourceBranches();
private Connection preparedConnection;
private static final boolean isNotifyRecoveryModuleOfCompletedBranches = jtaEnvironmentBean
.isNotifyCommitMarkableResourceRecoveryModuleOfCompleteBranches();
private static final Map<String, Boolean> isPerformImmediateCleanupOfCommitMarkableResourceBranchesMap = jtaEnvironmentBean
.getPerformImmediateCleanupOfCommitMarkableResourceBranchesMap();
static {
commitMarkableResourceRecoveryModule = null;
RecoveryManager recMan = RecoveryManager.manager();
Vector recoveryModules = recMan.getModules();
if (recoveryModules != null) {
Enumeration modules = recoveryModules.elements();
while (modules.hasMoreElements()) {
RecoveryModule m = (RecoveryModule) modules.nextElement();
if (m instanceof CommitMarkableResourceRecordRecoveryModule) {
commitMarkableResourceRecoveryModule = (CommitMarkableResourceRecordRecoveryModule) m;
break;
}
}
}
}
/**
* For recovery
*/
public CommitMarkableResourceRecord() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.CommitMarkableResourceRecord (), record id=" + order());
}
tableName = null;
}
public CommitMarkableResourceRecord(TransactionImple tx,
ConnectableResource xaResource, final Xid xid,
BasicAction basicAction) throws IllegalStateException,
RollbackException, SystemException {
super(new Uid(), null, ObjectType.ANDPERSISTENT);
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.CommitMarkableResourceRecord ( " + tx + ", " + xaResource + ", "
+ xid + ", " + basicAction + " ), record id=" + order());
}
this.connectableResource = xaResource;
XAResourceWrapper xaResourceWrapper = ((XAResourceWrapper) xaResource);
this.commitMarkableJndiName = xaResourceWrapper.getJndiName();
this.productName = xaResourceWrapper.getProductName();
this.productVersion = xaResourceWrapper.getProductVersion();
this.xid = xid;
this.basicAction = basicAction;
String tableName = commitMarkableResourceTableNameMap
.get(commitMarkableJndiName);
if (tableName != null) {
this.tableName = tableName;
} else {
this.tableName = defaultTableName;
}
Boolean boolean1 = isPerformImmediateCleanupOfCommitMarkableResourceBranchesMap
.get(commitMarkableJndiName);
if (boolean1 != null) {
isPerformImmediateCleanupOfBranches = boolean1;
}
if (isPerformImmediateCleanupOfBranches) {
			// a session synch may enlist a CMR in a transaction so this synch must be correctly ordered
new TransactionSynchronizationRegistryImple()
.registerInterposedSynchronization(new Synchronization() {
@Override
public void beforeCompletion() {
}
@Override
public void afterCompletion(int status) {
if (!onePhase && status == Status.STATUS_COMMITTED) {
Connection connection = null;
try {
connection = ((Connection) connectableResource
.getConnection());
connection.setAutoCommit(false);
String sql = "DELETE from "
+ CommitMarkableResourceRecord.this.tableName
+ " where xid in (?)";
PreparedStatement prepareStatement = connection
.prepareStatement(sql);
try {
XID toSave = ((XidImple) xid).getXID();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(
baos);
dos.writeInt(toSave.formatID);
dos.writeInt(toSave.gtrid_length);
dos.writeInt(toSave.bqual_length);
dos.writeInt(toSave.data.length);
dos.write(toSave.data);
dos.flush();
prepareStatement.setBytes(1, baos.toByteArray());
if (prepareStatement.executeUpdate() != 1) {
tsLogger.logger
.error("Update was not successfull");
connection.rollback();
} else {
connection.commit();
}
} catch (IOException e) {
tsLogger.logger
.warn("Could not generate prepareStatement paramaters",
e);
} finally {
try {
prepareStatement.close();
} catch (SQLException e) {
tsLogger.logger
.warn("Could not close the prepared statement",
e);
}
}
} catch (Throwable e1) {
tsLogger.logger
.warn("Could not delete CommitMarkableResourceRecord entry, will rely on RecoveryModule",
e1);
} finally {
if (connection != null) {
try {
connection.close();
} catch (SQLException e) {
tsLogger.logger
.warn("Could not close the preparedConnection",
e);
}
}
}
}
}
});
} else if (isNotifyRecoveryModuleOfCompletedBranches) {
			// a session synch may enlist a CMR in a transaction so this synch must be correctly ordered
new TransactionSynchronizationRegistryImple()
.registerInterposedSynchronization(new Synchronization() {
@Override
public void beforeCompletion() {
}
@Override
public void afterCompletion(int status) {
if (!onePhase && status == Status.STATUS_COMMITTED) {
commitMarkableResourceRecoveryModule
.notifyOfCompletedBranch(
commitMarkableJndiName, xid);
}
}
});
}
}
public String getProductName() {
return productName;
}
public String getProductVersion() {
return productVersion;
}
public String getJndiName() {
return commitMarkableJndiName;
}
public void updateOutcome(boolean committed) {
this.hasCompleted = true;
this.committed = committed;
}
/**
* We need to save this so we know there was a ConnectableResource in the
* intentions list.
*/
public boolean doSave() {
return true;
}
public boolean save_state(OutputObjectState os, int t) {
boolean res = false;
try {
// We store these information so that during recovery we can query
// the resource
// manager to see if it had committed prior to any potential crash
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("pack: " + commitMarkableJndiName);
}
os.packString(commitMarkableJndiName);
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("pack: " + xid);
}
XidImple.pack(os, xid);
os.packBoolean(hasCompleted);
if (hasCompleted) {
os.packBoolean(committed);
}
os.packString(productName);
os.packString(productVersion);
res = super.save_state(os, t);
} catch (Exception e) {
jtaLogger.logger.warn(
"Could not save_state: " + XAHelper.xidToString(xid), e);
}
return res;
}
public boolean restore_state(InputObjectState os, int t) {
boolean res = false;
try {
commitMarkableJndiName = os.unpackString();
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("unpack: " + commitMarkableJndiName);
}
xid = XidImple.unpack(os);
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("unpack: " + xid);
}
if (os.unpackBoolean()) {
committed = os.unpackBoolean();
} else {
// This will return true if the
// CommitMarkableRecoveryModule is
// between phases and the XID
// has not been GC'd
committed = commitMarkableResourceRecoveryModule.wasCommitted(
commitMarkableJndiName, xid);
}
productName = os.unpackString();
productVersion = os.unpackString();
res = super.restore_state(os, t);
} catch (Exception e) {
jtaLogger.logger.warn(
"Could not restore_state" + XAHelper.xidToString(xid), e);
}
return res;
}
/**
* This will add the required recovery data about this resource into the
* resources preparedConnection. If the preparedConnection is in read only
* mode, we do not need to persist this information.
*/
public int topLevelPrepare() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.topLevelPrepare for " + this + ", record id=" + order());
}
try {
PreparedStatement prepareStatement = null;
preparedConnection = (Connection) connectableResource
.getConnection();
try {
prepareStatement = preparedConnection
.prepareStatement("insert into "
+ tableName
+ " (xid, transactionManagerID, actionuid) values (?,?,?)");
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
XID toSave = ((XidImple) xid).getXID();
dos.writeInt(toSave.formatID);
dos.writeInt(toSave.gtrid_length);
dos.writeInt(toSave.bqual_length);
dos.writeInt(toSave.data.length);
dos.write(toSave.data);
dos.flush();
prepareStatement.setBytes(1, baos.toByteArray());
prepareStatement.setString(2, TxControl.getXANodeName());
prepareStatement.setBytes(3, basicAction.get_uid().getBytes());
if (prepareStatement.executeUpdate() != 1) {
tsLogger.logger.warn("Update was not successful");
removeConnection();
return TwoPhaseOutcome.PREPARE_NOTOK;
}
} finally {
if (prepareStatement != null)
prepareStatement.close();
}
return TwoPhaseOutcome.PREPARE_OK;
} catch (Throwable t) {
tsLogger.logger.error(
"Could not add recovery data to the 1PC resource", t);
return TwoPhaseOutcome.PREPARE_NOTOK;
}
}
public int topLevelAbort() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.topLevelAbort for " + this + ", record id=" + order());
}
try {
try {
// This can never be null as it can only ever be called before
// crash
// when we have a reference
// on a connectableResource still. Although topLevelAbort can be
// called for RecoverAtomicAction, it
// can only do that for resources after the head position in the
// preparedList, we know this resource
// must be first
((XAResource) connectableResource).rollback(xid);
hasCompleted = true;
committed = false;
return TwoPhaseOutcome.FINISH_OK;
} catch (XAException e) {
XAResourceErrorHandler handler = new XAResourceErrorHandler(e, (XAResource) connectableResource, xid);
return handler.handleCMRRollbackError();
} catch (Throwable e) {
jtaLogger.i18NLogger.warn_resources_arjunacore_rollbackerror(XAHelper.xidToString(xid),
connectableResource.toString(), "-", e);
return TwoPhaseOutcome.FINISH_ERROR;
}
} finally {
removeConnection();
}
}
public int topLevelCommit() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.topLevelCommit for " + this + ", record id=" + order());
}
return commit(false);
}
public int topLevelOnePhaseCommit() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.topLevelOnePhaseCommit for " + this + ", record id=" + order());
}
return commit(true);
}
private int commit(boolean onePhase) {
// As this can be called during recovery we check to see if we have the
// pre-crash reference
this.onePhase = onePhase;
if (connectableResource != null) {
try {
((XAResource) connectableResource).commit(xid, false);
hasCompleted = true;
committed = true;
return TwoPhaseOutcome.FINISH_OK;
} catch (XAException e) {
XAResourceErrorHandler handler = new XAResourceErrorHandler(e, (XAResource) connectableResource, xid);
int res = handler.handleCMRCommitError(onePhase);
committed = handler.isCommitted();
return res;
} catch (Throwable e) {
jtaLogger.i18NLogger.warn_resources_arjunacore_commitxaerror(XAHelper.xidToString(xid),
connectableResource.toString(), "-", e);
return TwoPhaseOutcome.FINISH_ERROR;
} finally {
if (!isPerformImmediateCleanupOfBranches) {
removeConnection();
}
}
} else {
// This is a recovery scenario
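			// No pre-crash resource reference is available, so the outcome falls back
			// to the committed flag recovered in restore_state (either from the log or
			// by querying the recovery module).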
if (committed) {
return TwoPhaseOutcome.FINISH_OK;
} else {
return TwoPhaseOutcome.HEURISTIC_ROLLBACK;
}
}
}
private final void removeConnection() {
if (preparedConnection != null) {
try {
preparedConnection.close();
preparedConnection = null;
} catch (SQLException e) {
tsLogger.logger.warn("Could not close the preparedConnection", e);
}
}
}
public Uid order() {
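		// Returning the minimum Uid should order this record ahead of all other
		// records in the intentions list, so the CMR resource is processed first.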
return Uid.minUid();
}
public boolean propagateOnCommit() {
return false;
}
public int typeIs() {
return RecordType.COMMITMARKABLERESOURCE;
}
public String type() {
return "/StateManager/AbstractRecord/CommitMarkableResourceRecord";
}
public Object value() {
return connectableResource;
}
public void setValue(Object o) {
}
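	// Nested transactions are not supported by this record type (see the class
	// javadoc), so nested prepare vetoes and nested commit reports an error.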
public int nestedAbort() {
return TwoPhaseOutcome.FINISH_OK;
}
public int nestedCommit() {
return TwoPhaseOutcome.FINISH_ERROR;
}
public int nestedPrepare() {
return TwoPhaseOutcome.PREPARE_NOTOK;
}
public void merge(AbstractRecord a) {
}
public void alter(AbstractRecord a) {
}
public boolean shouldAdd(AbstractRecord a) {
return false;
}
public boolean shouldAlter(AbstractRecord a) {
return false;
}
public boolean shouldMerge(AbstractRecord a) {
return false;
}
public boolean shouldReplace(AbstractRecord a) {
return false;
}
}
| ArjunaJTA/jta/classes/com/arjuna/ats/internal/jta/resources/arjunacore/CommitMarkableResourceRecord.java | /*
* Copyright 2013, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* (C) 2013
* @author JBoss Inc.
*/
package com.arjuna.ats.internal.jta.resources.arjunacore;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Enumeration;
import java.util.Map;
import java.util.Vector;
import javax.transaction.RollbackException;
import javax.transaction.Status;
import javax.transaction.Synchronization;
import javax.transaction.SystemException;
import javax.transaction.xa.XAException;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
import com.arjuna.ats.internal.jta.resources.XAResourceErrorHandler;
import org.jboss.tm.ConnectableResource;
import org.jboss.tm.XAResourceWrapper;
import com.arjuna.ats.arjuna.ObjectType;
import com.arjuna.ats.arjuna.common.Uid;
import com.arjuna.ats.arjuna.coordinator.AbstractRecord;
import com.arjuna.ats.arjuna.coordinator.BasicAction;
import com.arjuna.ats.arjuna.coordinator.RecordType;
import com.arjuna.ats.arjuna.coordinator.TwoPhaseOutcome;
import com.arjuna.ats.arjuna.coordinator.TxControl;
import com.arjuna.ats.arjuna.logging.tsLogger;
import com.arjuna.ats.arjuna.recovery.RecoveryManager;
import com.arjuna.ats.arjuna.recovery.RecoveryModule;
import com.arjuna.ats.arjuna.state.InputObjectState;
import com.arjuna.ats.arjuna.state.OutputObjectState;
import com.arjuna.ats.internal.jta.recovery.arjunacore.CommitMarkableResourceRecordRecoveryModule;
import com.arjuna.ats.internal.jta.transaction.arjunacore.TransactionImple;
import com.arjuna.ats.internal.jta.xa.XID;
import com.arjuna.ats.jta.common.JTAEnvironmentBean;
import com.arjuna.ats.jta.logging.jtaLogger;
import com.arjuna.ats.jta.utils.XAHelper;
import com.arjuna.ats.jta.xa.XidImple;
import com.arjuna.common.internal.util.propertyservice.BeanPopulator;
/**
* The CommitMarkableResourceRecord does not support nested transactions
*
 * If the database that a CommitMarkableResourceRecord is linked to is down
 * forever, it will have the side effect of never expiring a RecoverAtomicAction.
*
* The CommitMarkableResourceRecord assumes the following table has been
* created:
*
* syb:
*
* create table xids (xid varbinary(144), transactionManagerID varchar(64),
* actionuid varbinary(28))
*
* ora:
*
* create table xids (xid RAW(144), transactionManagerID varchar(64), actionuid
* RAW(28))
*
* psql:
*
* create table xids (xid bytea, transactionManagerID varchar(64), actionuid
* bytea)
*
* h2:
*
* create table xids (xid varbinary(144), transactionManagerID varchar(64),
* actionuid varbinary(28))
*
* sybase notes: sp_configure "lock scheme",0,datarows
*/
public class CommitMarkableResourceRecord extends AbstractRecord {
private final String tableName;
private Xid xid;
private ConnectableResource connectableResource;
private boolean onePhase = false;
private String commitMarkableJndiName;
private boolean committed;
private BasicAction basicAction;
private String productName;
private String productVersion;
private boolean hasCompleted;
private static CommitMarkableResourceRecordRecoveryModule commitMarkableResourceRecoveryModule;
private static final JTAEnvironmentBean jtaEnvironmentBean = BeanPopulator
.getDefaultInstance(JTAEnvironmentBean.class);
private static final Map<String, String> commitMarkableResourceTableNameMap = jtaEnvironmentBean
.getCommitMarkableResourceTableNameMap();
private static final String defaultTableName = jtaEnvironmentBean
.getDefaultCommitMarkableTableName();
private boolean isPerformImmediateCleanupOfBranches = jtaEnvironmentBean
.isPerformImmediateCleanupOfCommitMarkableResourceBranches();
private Connection preparedConnection;
private static final boolean isNotifyRecoveryModuleOfCompletedBranches = jtaEnvironmentBean
.isNotifyCommitMarkableResourceRecoveryModuleOfCompleteBranches();
private static final Map<String, Boolean> isPerformImmediateCleanupOfCommitMarkableResourceBranchesMap = jtaEnvironmentBean
.getPerformImmediateCleanupOfCommitMarkableResourceBranchesMap();
static {
commitMarkableResourceRecoveryModule = null;
RecoveryManager recMan = RecoveryManager.manager();
Vector recoveryModules = recMan.getModules();
if (recoveryModules != null) {
Enumeration modules = recoveryModules.elements();
while (modules.hasMoreElements()) {
RecoveryModule m = (RecoveryModule) modules.nextElement();
if (m instanceof CommitMarkableResourceRecordRecoveryModule) {
commitMarkableResourceRecoveryModule = (CommitMarkableResourceRecordRecoveryModule) m;
break;
}
}
}
}
/**
* For recovery
*/
public CommitMarkableResourceRecord() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.CommitMarkableResourceRecord (), record id=" + order());
}
tableName = null;
}
public CommitMarkableResourceRecord(TransactionImple tx,
ConnectableResource xaResource, final Xid xid,
BasicAction basicAction) throws IllegalStateException,
RollbackException, SystemException {
super(new Uid(), null, ObjectType.ANDPERSISTENT);
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.CommitMarkableResourceRecord ( " + tx + ", " + xaResource + ", "
+ xid + ", " + basicAction + " ), record id=" + order());
}
this.connectableResource = xaResource;
XAResourceWrapper xaResourceWrapper = ((XAResourceWrapper) xaResource);
this.commitMarkableJndiName = xaResourceWrapper.getJndiName();
this.productName = xaResourceWrapper.getProductName();
this.productVersion = xaResourceWrapper.getProductVersion();
this.xid = xid;
this.basicAction = basicAction;
String tableName = commitMarkableResourceTableNameMap
.get(commitMarkableJndiName);
if (tableName != null) {
this.tableName = tableName;
} else {
this.tableName = defaultTableName;
}
Boolean boolean1 = isPerformImmediateCleanupOfCommitMarkableResourceBranchesMap
.get(commitMarkableJndiName);
if (boolean1 != null) {
isPerformImmediateCleanupOfBranches = boolean1;
}
if (isPerformImmediateCleanupOfBranches) {
tx.registerSynchronization(new Synchronization() {
@Override
public void beforeCompletion() {
}
@Override
public void afterCompletion(int status) {
if (!onePhase && status == Status.STATUS_COMMITTED) {
Connection connection = null;
try {
connection = ((Connection) connectableResource
.getConnection());
connection.setAutoCommit(false);
String sql = "DELETE from "
+ CommitMarkableResourceRecord.this.tableName
+ " where xid in (?)";
PreparedStatement prepareStatement = connection
.prepareStatement(sql);
try {
XID toSave = ((XidImple) xid).getXID();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(
baos);
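							// Serialize the XID with the same byte layout used at prepare
							// time so the DELETE parameter matches the stored xid value.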
dos.writeInt(toSave.formatID);
dos.writeInt(toSave.gtrid_length);
dos.writeInt(toSave.bqual_length);
dos.writeInt(toSave.data.length);
dos.write(toSave.data);
dos.flush();
prepareStatement.setBytes(1, baos.toByteArray());
if (prepareStatement.executeUpdate() != 1) {
tsLogger.logger
.error("Update was not successfull");
connection.rollback();
} else {
connection.commit();
}
} catch (IOException e) {
tsLogger.logger
.warn("Could not generate prepareStatement paramaters",
e);
} finally {
try {
prepareStatement.close();
} catch (SQLException e) {
tsLogger.logger
.warn("Could not close the prepared statement",
e);
}
}
} catch (Throwable e1) {
tsLogger.logger
.warn("Could not delete CommitMarkableResourceRecord entry, will rely on RecoveryModule",
e1);
} finally {
if (connection != null) {
try {
connection.close();
} catch (SQLException e) {
tsLogger.logger
.warn("Could not close the preparedConnection",
e);
}
}
}
}
}
});
} else if (isNotifyRecoveryModuleOfCompletedBranches) {
tx.registerSynchronization(new Synchronization() {
@Override
public void beforeCompletion() {
}
@Override
public void afterCompletion(int status) {
if (!onePhase && status == Status.STATUS_COMMITTED) {
commitMarkableResourceRecoveryModule
.notifyOfCompletedBranch(
commitMarkableJndiName, xid);
}
}
});
}
}
public String getProductName() {
return productName;
}
public String getProductVersion() {
return productVersion;
}
public String getJndiName() {
return commitMarkableJndiName;
}
public void updateOutcome(boolean committed) {
this.hasCompleted = true;
this.committed = committed;
}
/**
* We need to save this so we know there was a ConnectableResource in the
* intentions list.
*/
public boolean doSave() {
return true;
}
public boolean save_state(OutputObjectState os, int t) {
boolean res = false;
try {
			// We store this information so that during recovery we can query the
			// resource manager to see if it had committed prior to any potential crash
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("pack: " + commitMarkableJndiName);
}
os.packString(commitMarkableJndiName);
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("pack: " + xid);
}
XidImple.pack(os, xid);
os.packBoolean(hasCompleted);
if (hasCompleted) {
os.packBoolean(committed);
}
os.packString(productName);
os.packString(productVersion);
res = super.save_state(os, t);
} catch (Exception e) {
jtaLogger.logger.warn(
"Could not save_state: " + XAHelper.xidToString(xid), e);
}
return res;
}
public boolean restore_state(InputObjectState os, int t) {
boolean res = false;
try {
commitMarkableJndiName = os.unpackString();
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("unpack: " + commitMarkableJndiName);
}
xid = XidImple.unpack(os);
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("unpack: " + xid);
}
if (os.unpackBoolean()) {
committed = os.unpackBoolean();
} else {
// This will return true if the
// CommitMarkableRecoveryModule is
// between phases and the XID
// has not been GC'd
committed = commitMarkableResourceRecoveryModule.wasCommitted(
commitMarkableJndiName, xid);
}
productName = os.unpackString();
productVersion = os.unpackString();
res = super.restore_state(os, t);
} catch (Exception e) {
jtaLogger.logger.warn(
"Could not restore_state" + XAHelper.xidToString(xid), e);
}
return res;
}
/**
* This will add the required recovery data about this resource into the
	 * resource's preparedConnection. If the preparedConnection is in read-only
* mode, we do not need to persist this information.
*/
public int topLevelPrepare() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.topLevelPrepare for " + this + ", record id=" + order());
}
try {
PreparedStatement prepareStatement = null;
preparedConnection = (Connection) connectableResource
.getConnection();
try {
prepareStatement = preparedConnection
.prepareStatement("insert into "
+ tableName
+ " (xid, transactionManagerID, actionuid) values (?,?,?)");
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
XID toSave = ((XidImple) xid).getXID();
dos.writeInt(toSave.formatID);
dos.writeInt(toSave.gtrid_length);
dos.writeInt(toSave.bqual_length);
dos.writeInt(toSave.data.length);
dos.write(toSave.data);
dos.flush();
prepareStatement.setBytes(1, baos.toByteArray());
prepareStatement.setString(2, TxControl.getXANodeName());
prepareStatement.setBytes(3, basicAction.get_uid().getBytes());
if (prepareStatement.executeUpdate() != 1) {
tsLogger.logger.warn("Update was not successful");
removeConnection();
return TwoPhaseOutcome.PREPARE_NOTOK;
}
} finally {
if (prepareStatement != null)
prepareStatement.close();
}
return TwoPhaseOutcome.PREPARE_OK;
} catch (Throwable t) {
tsLogger.logger.error(
"Could not add recovery data to the 1PC resource", t);
return TwoPhaseOutcome.PREPARE_NOTOK;
}
}
public int topLevelAbort() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.topLevelAbort for " + this + ", record id=" + order());
}
try {
try {
				// This can never be null as it can only ever be called before a
				// crash, while we still hold a reference to the connectableResource.
				// Although topLevelAbort can be called for RecoverAtomicAction, it
				// can only do that for resources after the head position in the
				// preparedList, so we know this resource must be first.
((XAResource) connectableResource).rollback(xid);
hasCompleted = true;
committed = false;
return TwoPhaseOutcome.FINISH_OK;
} catch (XAException e) {
XAResourceErrorHandler handler = new XAResourceErrorHandler(e, (XAResource) connectableResource, xid);
return handler.handleCMRRollbackError();
} catch (Throwable e) {
jtaLogger.i18NLogger.warn_resources_arjunacore_rollbackerror(XAHelper.xidToString(xid),
connectableResource.toString(), "-", e);
return TwoPhaseOutcome.FINISH_ERROR;
}
} finally {
removeConnection();
}
}
public int topLevelCommit() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.topLevelCommit for " + this + ", record id=" + order());
}
return commit(false);
}
public int topLevelOnePhaseCommit() {
if (tsLogger.logger.isTraceEnabled()) {
tsLogger.logger.trace("CommitMarkableResourceRecord.topLevelOnePhaseCommit for " + this + ", record id=" + order());
}
return commit(true);
}
private int commit(boolean onePhase) {
// As this can be called during recovery we check to see if we have the
// pre-crash reference
this.onePhase = onePhase;
if (connectableResource != null) {
try {
((XAResource) connectableResource).commit(xid, false);
hasCompleted = true;
committed = true;
return TwoPhaseOutcome.FINISH_OK;
} catch (XAException e) {
XAResourceErrorHandler handler = new XAResourceErrorHandler(e, (XAResource) connectableResource, xid);
int res = handler.handleCMRCommitError(onePhase);
committed = handler.isCommitted();
return res;
} catch (Throwable e) {
jtaLogger.i18NLogger.warn_resources_arjunacore_commitxaerror(XAHelper.xidToString(xid),
connectableResource.toString(), "-", e);
return TwoPhaseOutcome.FINISH_ERROR;
} finally {
if (!isPerformImmediateCleanupOfBranches) {
removeConnection();
}
}
} else {
// This is a recovery scenario
if (committed) {
return TwoPhaseOutcome.FINISH_OK;
} else {
return TwoPhaseOutcome.HEURISTIC_ROLLBACK;
}
}
}
private final void removeConnection() {
if (preparedConnection != null) {
try {
preparedConnection.close();
preparedConnection = null;
} catch (SQLException e) {
tsLogger.logger.warn("Could not close the preparedConnection", e);
}
}
}
public Uid order() {
return Uid.minUid();
}
public boolean propagateOnCommit() {
return false;
}
public int typeIs() {
return RecordType.COMMITMARKABLERESOURCE;
}
public String type() {
return "/StateManager/AbstractRecord/CommitMarkableResourceRecord";
}
public Object value() {
return connectableResource;
}
public void setValue(Object o) {
}
public int nestedAbort() {
return TwoPhaseOutcome.FINISH_OK;
}
public int nestedCommit() {
return TwoPhaseOutcome.FINISH_ERROR;
}
public int nestedPrepare() {
return TwoPhaseOutcome.PREPARE_NOTOK;
}
public void merge(AbstractRecord a) {
}
public void alter(AbstractRecord a) {
}
public boolean shouldAdd(AbstractRecord a) {
return false;
}
public boolean shouldAlter(AbstractRecord a) {
return false;
}
public boolean shouldMerge(AbstractRecord a) {
return false;
}
public boolean shouldReplace(AbstractRecord a) {
return false;
}
}
| [JBTM-2165] Make CMR synchs interposed so that they are ordered after session synchs
| ArjunaJTA/jta/classes/com/arjuna/ats/internal/jta/resources/arjunacore/CommitMarkableResourceRecord.java | [JBTM-2165] Make CMR synchs interposed so that they are ordered after session synchs
|
Java | lgpl-2.1 | b557567c2776233ee12fac93bcbed9af5513ccf2 | 0 | xwiki/xwiki-enterprise,xwiki/xwiki-enterprise | /*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.test.selenium;
import junit.framework.Test;
import org.xwiki.test.selenium.framework.AbstractXWikiTestCase;
import org.xwiki.test.selenium.framework.ColibriSkinExecutor;
import org.xwiki.test.selenium.framework.XWikiTestSuite;
/**
* Verify the overall Administration application features.
*
* @version $Id$
*/
public class AdministrationTest extends AbstractXWikiTestCase
{
public static Test suite()
{
XWikiTestSuite suite = new XWikiTestSuite("Verify the Administration application features.");
suite.addTestSuite(AdministrationTest.class, ColibriSkinExecutor.class);
return suite;
}
@Override
public void setUp() throws Exception
{
super.setUp();
loginAsAdmin();
}
/**
     * This method performs the following tests:
*
* <ul>
* <li>Login as global admin.</li>
* <li>Validate presence of default sections for global and space sections.</li>
* <li>Validate presence of application administration sections at global level only.</li>
* </ul>
*/
public void testGlobalAndSpaceSections()
{
clickLinkWithText("Administer Wiki");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Editing')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Localization')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Email')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Presentation')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Elements')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Registration')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Users')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Groups')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Rights')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Registration')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Import')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Export')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Templates')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=MessageStream')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Panels.PanelWizard')]");
// select space administration
getSelenium().select("goto-select", "label=Main");
waitPage();
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Editing')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Localization')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Email')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Presentation')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Elements')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Registration')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Users')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Groups')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Rights')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Registration')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Import')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Export')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Templates')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=MessageStream')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Panels.PanelWizard')]");
}
/*
     * Test to see that an application page is included only if that application exists
*/
public void testApplicationSection()
{
        // Delete the XWiki.SearchAdmin page and test it's not present in the admin global menu anymore
deletePage("XWiki", "SearchAdmin");
clickLinkWithText("Administer Wiki");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Search')]");
restorePage("XWiki", "SearchAdmin");
}
/*
* Test modifying XWiki.XWikiPreferences multi-language field and save it.
*/
public void testSettingXWikiPreferences()
{
clickLinkWithText("Administer Wiki");
getSelenium().select("goto-select", "label=Wiki administration");
clickLinkWithXPath("//a[text()='Localization']", true);
getSelenium().select("//select[@name='XWiki.XWikiPreferences_0_multilingual']", "label=Yes");
clickLinkWithXPath("//input[@value='Save']", true);
assertElementPresent("//span[@id='headerlanguages']");
}
/*
* Test adding a new category in Blog Categories
*/
/* Disabled until the new blog can insert its own administration page.
public void testBlogAdmin()
{
open("XWiki", "XWikiPreferences", "admin");
// select global administration
clickLinkWithLocator("//span[text()='General']", true);
getSelenium().select("//select[@id='XWiki.XWikiPreferences_0_editor']", "label=Text");
clickLinkWithLocator("//input[@value='Save']");
assertElementPresent("//span[@id='showsectionswrapper']");
clickLinkWithXPath("//a[@id='showsections']", false);
assertElementPresent("//span[@id='hidesectionswrapper']");
clickLinkWithLocator("//span[text()='Blog categories']");
setFieldValue("name", "New Category");
setFieldValue("description", "New Category Content");
clickLinkWithLocator("//input[@value='Add']", true);
assertTextPresent("New Category");
}*/
/*
* Test Panel Wizard
*/
public void testPanelsAdmin()
{
open("XWiki", "XWikiPreferences", "admin");
// test panel wizard at global level
clickLinkWithLocator("//a[text()='Panel Wizard']");
waitForCondition("selenium.page().bodyText().indexOf('Panel List')!=-1;");
clickLinkWithXPath("//a[@href='#PageLayoutSection']", false);
waitForCondition("selenium.isElementPresent(\"//div[@id='bothcolumns']\")!=false;");
clickLinkWithXPath("//div[@id='bothcolumns']", false);
waitForCondition("selenium.page().bodyText().indexOf('Page Layout')!=-1;");
clickLinkWithXPath("//a[@href='#PanelListSection']", false);
getSelenium().dragAndDropToObject("//div[@class='panel expanded QuickLinks']", "//div[@id='leftPanels']");
clickLinkWithXPath("//a[text()='Save the new layout']", false);
waitForCondition("selenium.isAlertPresent()");
assertEquals("The layout has been saved properly.", getSelenium().getAlert());
open("Main", "WebHome");
assertElementPresent("leftPanels");
assertElementPresent("rightPanels");
// Revert changes
open("XWiki", "XWikiPreferences", "admin");
clickLinkWithLocator("//a[text()='Panel Wizard']");
waitForCondition("selenium.page().bodyText().indexOf('Page Layout')!=-1;");
clickLinkWithXPath("//a[@href='#PageLayoutSection']", false);
waitForCondition("selenium.isElementPresent(\"//div[@id='rightcolumn']\")!=false;");
clickLinkWithXPath("//div[@id='rightcolumn']", false);
clickLinkWithXPath("//a[text()='Save the new layout']", false);
waitForCondition("selenium.isAlertPresent()");
assertEquals("The layout has been saved properly.", getSelenium().getAlert());
open("Main", "WebHome");
assertElementNotPresent("leftPanels");
assertElementPresent("rightPanels");
// test panel wizard at space level
open("TestPanelsAdmin", "WebHome", "edit", "editor=wiki");
setFieldValue("content", "aaa");
clickEditSaveAndView();
open("TestPanelsAdmin", "WebPreferences", "admin");
clickLinkWithLocator("//a[text()='Panel Wizard']");
waitForCondition("selenium.page().bodyText().indexOf('Page Layout')!=-1;");
clickLinkWithXPath("//a[@href='#PageLayoutSection']", false);
waitForCondition("selenium.isElementPresent(\"//div[@id='leftcolumn']\")!=false;");
clickLinkWithXPath("//div[@id='leftcolumn']", false);
waitForCondition("selenium.page().bodyText().indexOf('Panel List')!=-1;");
clickLinkWithXPath("//a[@href='#PanelListSection']", false);
getSelenium().dragAndDropToObject("//div[@class='panel expanded QuickLinks']", "//div[@id='leftPanels']");
clickLinkWithXPath("//a[text()='Save the new layout']", false);
waitForCondition("selenium.isAlertPresent()");
assertEquals("The layout has been saved properly.", getSelenium().getAlert());
open("TestPanelsAdmin", "WebHome");
assertElementPresent("leftPanels");
assertElementPresent("//div[@class='panel expanded QuickLinks']");
open("XWiki", "WebHome");
assertElementNotPresent("leftPanels");
assertElementNotPresent("//div[@class='panel expanded QuickLinks']");
}
/*
* Test add configurable application to existing section.
*
* This test depends on the "Presentation" section existing.
* Tests: XWiki.ConfigurableClass
*/
public void testAddConfigurableApplicationInExistingSection()
{
// Create the configurable for global admin.
createConfigurableApplication("Main", "TestConfigurable", "Presentation", true);
// Check it's available in global section.
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=Presentation");
assertConfigurationPresent("Main", "TestConfigurable");
// Check it's not available in space section.
open("Main", "WebPreferences", "admin", "editor=spaceadmin§ion=Presentation");
assertConfigurationNotPresent("Main", "TestConfigurable");
// Switch application to non-global
open("Main", "TestConfigurable", "edit", "editor=object");
getSelenium().uncheck("XWiki.ConfigurableClass_0_configureGlobally");
clickEditSaveAndView();
// Check that it is available in space section.
open("Main", "WebPreferences", "admin", "editor=spaceadmin§ion=Presentation");
assertConfigurationPresent("Main", "TestConfigurable");
// Check that it's not available in another space.
open("XWiki", "WebPreferences", "admin", "editor=spaceadmin§ion=Presentation");
assertConfigurationNotPresent("Main", "TestConfigurable");
// Check that it's not available in global section.
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=Presentation");
assertConfigurationNotPresent("Main", "TestConfigurable");
}
/**
* Test add configurable application to a nonexistent section.
* <p>
* This test depends on the "HopingThereIsNoSectionByThisName" section not existing.<br/>
* Tests: XWiki.ConfigurableClass
*/
public void testAddConfigurableApplicationInNonexistantSection()
{
String section = "HopingThereIsNoSectionByThisName";
// Create the configurable for global admin.
createConfigurableApplication("Main", "TestConfigurable", section, true);
// Check it's available in global section.
clickLinkWithText("Administer Wiki");
assertTrue(isAdminMenuItemPresent(section));
clickLinkWithText(section);
assertConfigurationPresent("Main", "TestConfigurable");
// Check that it's not available in space section.
open("Main", "WebPreferences", "admin");
// Assert there is no menu item in the administration menu for our configurable application.
assertFalse(isAdminMenuItemPresent(section));
}
/**
* Fails if a user can create a Configurable application without having edit access to the configuration page (in
* this case: XWikiPreferences)
* <p>
* Tests: XWiki.ConfigurableClass
*/
public void testConfigurableCreatedByUnauthorizedWillNotExecute()
{
// Make sure the configurable page doesn't exist because otherwise we may fail to overwrite it with a
// non-administrator user.
deletePage("Main", "testConfigurableCreatedByUnauthorizedWillNotExecute");
// Create the configurable for global administrator.
loginAndRegisterUser("anotherJoker", "bentOnMalice", false);
String nonExistingSection = "HopingThereIsNoSectionByThisName";
createConfigurableApplication("Main",
"testConfigurableCreatedByUnauthorizedWillNotExecute",
nonExistingSection, true);
loginAsAdmin();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=" + nonExistingSection);
assertConfigurationNotEditable("Main", "testConfigurableCreatedByUnauthorizedWillNotExecute");
}
/*
* Creates a document with 2 configurable objects, one gets configured globally in one section and displays
* 2 configuration fields, the other is configured in the space in another section and displays the other 2
* fields. Fails if they are not displayed as they should be.
*
* Tests: XWiki.ConfigurableClass
*/
public void testApplicationConfiguredInMultipleSections()
{
String space = "Main";
String page = "TestConfigurable";
createConfigurableApplication(space, page, "TestSection1", true);
open(space, page, "edit", "editor=object");
// Add a second configurable object.
setFieldValue("classname", "XWiki.ConfigurableClass");
clickButtonAndContinue("//input[@name='action_objectadd']");
setFieldValue("XWiki.ConfigurableClass_1_displayInSection", "TestSection2");
setFieldValue("XWiki.ConfigurableClass_1_heading", "Some Other Heading");
setFieldValue("XWiki.ConfigurableClass_1_configurationClass", space + "." + page);
getSelenium().uncheck("XWiki.ConfigurableClass_1_configureGlobally");
// Set propertiesToShow so that each config only shows half of the properties.
setFieldValue("XWiki.ConfigurableClass_1_propertiesToShow", "TextArea, Select");
setFieldValue("XWiki.ConfigurableClass_0_propertiesToShow", "String, Boolean");
clickEditSaveAndView();
// Assert that half of the configuration shows up but not the other half.
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection1");
assertElementPresent("//div[@id='admin-page-content']/h2[@id='HSomeHeading']/span");
// Fields
String fullName = space + "." + page;
String form = "//div[@id='admin-page-content']/form[@action='/xwiki/bin/save/" + space + "/" + page + "']";
assertElementPresent(form + "/fieldset//label['String']");
assertElementPresent(form + "/fieldset//input[@name='" + fullName + "_0_String']");
assertElementPresent(form + "/fieldset//label['Boolean']");
assertElementPresent(form + "/fieldset//select[@name='" + fullName + "_0_Boolean']");
assertElementPresent(form + "/fieldset/input[@id='" + fullName + "_redirect']");
// xredirect
assertElementPresent(form + "/fieldset/input[@value='" + getSelenium().getLocation() + "'][@name='xredirect']");
// Save button
// assertElementPresent(form + "/div/p/span/input[@type='submit']");
// Javascript injects a save button outside of the form and removes the default save button.
waitForElement("//div/div/p/span/input[@type='submit'][@value='Save']");
// Should not be here
assertElementNotPresent(form + "/fieldset//textarea[@name='" + fullName + "_0_TextArea']");
assertElementNotPresent(form + "/fieldset//select[@name='" + fullName + "_0_Select']");
// Now we go to where the other half of the configuration should be.
open("Main", "WebPreferences", "admin", "editor=spaceadmin§ion=TestSection2");
assertElementPresent("//h2[@id='HSomeOtherHeading']/span");
// Fields
assertElementPresent(form + "/fieldset//label");
assertElementPresent(form + "/fieldset//textarea[@name='" + fullName + "_0_TextArea']");
assertElementPresent(form + "/fieldset//select[@name='" + fullName + "_0_Select']");
assertElementPresent(form + "/fieldset/input[@id='" + fullName + "_redirect']");
// xredirect
assertElementPresent(form + "/fieldset/input[@value='" + getSelenium().getLocation() + "'][@name='xredirect']");
// Save button
// assertElementPresent(form + "/div/p/span/input[@type='submit']");
// Javascript injects a save button outside of the form and removes the default save button.
waitForElement("//div/div/p/span/input[@type='submit'][@value='Save']");
// Should not be here
assertElementNotPresent(form + "/fieldset//input[@name='" + fullName + "_0_String']");
assertElementNotPresent(form + "/fieldset//select[@name='" + fullName + "_0_Boolean']");
}
/*
* Make sure html macros and pre tags are not being stripped
* @see: http://jira.xwiki.org/jira/browse/XAADMINISTRATION-141
*
* Tests: XWiki.ConfigurableClass
*/
public void testNotStrippingHtmlMacros()
{
String space = "Main";
String page = "TestConfigurable";
String test = "{{html}} <pre> {{html clean=\"false\"}} </pre> {{/html}}";
String fullName = space + "." + page;
String form = "//div[@id='admin-page-content']/form[@action='/xwiki/bin/save/" + space + "/" + page + "']";
createConfigurableApplication(space, page, "TestSection1", true);
open(space, page, "edit", "editor=object");
setFieldValue(fullName + "_0_TextArea", test);
setFieldValue(fullName + "_0_String", test);
clickEditSaveAndView();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection1");
waitForTextPresent(form + "/fieldset//textarea[@name='" + fullName + "_0_TextArea']", test);
        // Getting content from an input field requires getValue rather than getText
assertTrue(getSelenium().getValue(form + "/fieldset//input[@name='" + fullName + "_0_String']").equals(test));
}
/*
* If a value is specified for linkPrefix, then a link is generated with linkPrefix + prettyName of the property from
* the configuration class.
* linkPrefix = "http://www.xwiki.org/bin/view/Main/"
* property prettyName = "WebHome"
* generated link should equal "http://www.xwiki.org/bin/view/Main/WebHome"
*
* Tests: XWiki.ConfigurableClass
*/
public void testLabelLinkGeneration()
{
String space = "Main";
String page = "TestConfigurable";
createConfigurableApplication(space, page, "TestSection3", true);
open(space, page, "edit", "editor=object");
setFieldValue("XWiki.ConfigurableClass_0_linkPrefix", "TheLinkPrefix");
clickEditSaveAndView();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection3");
assertElementPresent("//form/fieldset//a[@href='TheLinkPrefixString']");
assertElementPresent("//form/fieldset//a[@href='TheLinkPrefixBoolean']");
assertElementPresent("//form/fieldset//a[@href='TheLinkPrefixTextArea']");
assertElementPresent("//form/fieldset//a[@href='TheLinkPrefixSelect']");
}
/*
* Fails unless XWiki.ConfigurableClass locks each page on view and unlocks any other configurable page.
* Also fails if codeToExecute is not being evaluated.
*
* Tests: XWiki.ConfigurableClass
*/
public void testLockingAndUnlocking()
{
String space = "Main";
String page1 = "TestConfigurable";
String page2 = "TestConfigurable2";
String isThisPageLocked = "{{velocity}}Is This Page Locked $doc.getLocked(){{/velocity}}";
createConfigurableApplication(space, page1, "TestSection4", true);
createConfigurableApplication(space, page2, "TestSection5", true);
open(space, page1, "edit", "editor=wiki");
setFieldValue("content", isThisPageLocked);
clickEditSaveAndView();
open(space, page2, "edit", "editor=wiki");
setFieldValue("content", isThisPageLocked);
clickEditSaveAndView();
// Now we go to the documents and see which is locked.
// Clear any locks by visiting the main page.
open("XWiki", "XWikiPreferences", "admin");
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection4");
// We have to switch user context without logging out, logging out removes all locks.
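        // Opening the same URL through 127.0.0.1 avoids sending the localhost session
        // cookie, so the page is viewed from a different session while the admin's lock
        // on the configurable page remains in place.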
open(space, page1, "view");
open(getSelenium().getLocation().replaceAll("http://localhost", "http://127.0.0.1"));
assertTextPresent("Is This Page Locked true");
open(space, page2, "view");
open(getSelenium().getLocation().replaceAll("http://localhost", "http://127.0.0.1"));
assertTextNotPresent("Is This Page Locked true");
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection5");
open(space, page1, "view");
open(getSelenium().getLocation().replaceAll("http://localhost", "http://127.0.0.1"));
assertTextNotPresent("Is This Page Locked true");
open(space, page2, "view");
open(getSelenium().getLocation().replaceAll("http://localhost", "http://127.0.0.1"));
assertTextPresent("Is This Page Locked true");
}
/*
* If CodeToExecute is defined in a configurable app, then it should be evaluated.
     * Also the heading should be evaluated and not just printed.
* If XWiki.ConfigurableClass is saved with programming rights, it should resave itself so that it doesn't have them.
*/
public void testCodeToExecutionAndAutoSandboxing()
{
String space = "Main";
String page = "TestConfigurable";
// Note: We are forced to use the silent notation because Selenium 2.20.0 doesn't escape properly the string
// passed to the Selenium.type() method and it seems ${...} has a special meaning, throwing an exception with
// the message "replacement is undefined". Escaping the value using backslash or doubling the { didn't work.
// See http://code.google.com/p/selenium/issues/detail?id=3510 .
String codeToExecute = "#set($code = 's sh')"
+ "Thi$!{code}ould be displayed."
+ "#if($xcontext.hasProgrammingRights())"
+ "This should not be displayed."
+ "#end";
String heading = "#set($code = 'his sho')"
+ "T$!{code}uld also be displayed.";
createConfigurableApplication(space, page, "TestSection6", true);
open(space, page, "edit", "editor=object");
setFieldValue("XWiki.ConfigurableClass_0_codeToExecute", codeToExecute);
setFieldValue("XWiki.ConfigurableClass_0_heading", heading);
setFieldValue("XWiki.ConfigurableClass_0_configurationClass", "");
clickEditSaveAndView();
// Our admin will foolishly save XWiki.ConfigurableClass, giving it programming rights.
open("XWiki", "ConfigurableClass", "edit", "editor=wiki");
// Since we modify ConfigurableClass, we must modify it back after to prevent polluting further tests.
// See the previous note about silent notation to understand why we perform a string replacement.
String originalContent = getFieldValue("content").replace("${", "$!{");
try {
setFieldValue("content", originalContent
+ "{{velocity}}Has Programming permission: $xcontext.hasProgrammingRights(){{/velocity}}");
clickEditSaveAndContinue();
// Now we look at the section for our configurable.
open("XWiki", "ConfigurableClass", "view", "editor=globaladmin§ion=TestSection6");
assertTextPresent("This should be displayed.");
assertTextPresent("This should also be displayed.");
assertTextNotPresent("This should not be displayed.");
assertTextPresent("Has Programming permission: false");
// Make sure javascript has not added a Save button.
assertElementNotPresent("//div/div/p/span/input[@type='submit'][@value='Save']");
} finally {
open("XWiki", "ConfigurableClass", "edit", "editor=wiki");
setFieldValue("content", originalContent);
clickEditSaveAndContinue();
}
}
/*
     * Proves that ConfigurableClass#codeToExecute is not rendered inline even if there is no
     * custom configuration class and the only content is custom content.
* Tests: XWiki.ConfigurableClass
*/
public void testCodeToExecuteNotInlineIfNoConfigurationClass()
{
String space = "Main";
String page = "TestConfigurable";
String test = "{{html}} <div> <p> hello </p> </div> {{/html}}";
open(space, page, "delete", "confirm=1");
createConfigurableApplication(space, page, "TestSection1", true);
open(space, page, "edit", "editor=object");
setFieldValue("XWiki.ConfigurableClass_0_configurationClass", "");
setFieldValue("XWiki.ConfigurableClass_0_codeToExecute", test);
clickEditSaveAndView();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection1");
assertElementNotPresent("//span[@class='xwikirenderingerror']");
}
/*
     * Proves that ConfigurableClass#codeToExecute is not rendered inline whether it's at the top of the
* form or inside of the form.
* Tests: XWiki.ConfigurableClass
*/
public void testCodeToExecuteNotInline()
{
String space = "Main";
String page = "TestConfigurable";
String test = "{{html}} <div> <p> hello </p> </div> {{/html}}";
createConfigurableApplication(space, page, "TestSection1", true);
open(space, page, "edit", "editor=object");
setFieldValue("classname", "XWiki.ConfigurableClass");
clickButtonAndContinue("//input[@name='action_objectadd']");
setFieldValue("XWiki.ConfigurableClass_0_codeToExecute", test);
setFieldValue("XWiki.ConfigurableClass_0_propertiesToShow", "String, Boolean");
setFieldValue("XWiki.ConfigurableClass_1_displayInSection", "TestSection1");
setFieldValue("XWiki.ConfigurableClass_1_configurationClass", space + "." + page);
setFieldValue("XWiki.ConfigurableClass_1_propertiesToShow", "TextArea, Select");
setFieldValue("XWiki.ConfigurableClass_1_codeToExecute", test);
getSelenium().check("XWiki.ConfigurableClass_1_configureGlobally");
clickEditSaveAndView();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection1");
assertElementNotPresent("//span[@class='xwikirenderingerror']");
}
/**
* Test functionality of the ForgotUsername page:
* <ul>
* <li>A user can be found using correct email</li>
* <li>No user is found using wrong email</li>
* <li>Email text is properly escaped</li>
* </ul>
*/
public void testForgotUsername()
{
String space = "Test";
String page = "SQLTestPage";
String mail = "[email protected]"; // default Admin mail
String user = "Admin";
String badMail = "[email protected]";
// Ensure there is a page we will try to find using HQL injection
editInWikiEditor(space, page);
setFieldValue("title", page);
setFieldValue("content", page);
clickEditSaveAndView();
// test that it finds the correct user
open("XWiki", "ForgotUsername");
setFieldValue("e", mail);
submit("//input[@type='submit']"); // there are no other buttons
assertTextNotPresent("No account is registered using this email address");
assertElementPresent("//div[@id='xwikicontent']//strong[text()='" + user + "']");
// test that bad mail results in no results
open("XWiki", "ForgotUsername");
setFieldValue("e", badMail);
submit("//input[@type='submit']"); // there are no other buttons
assertTextPresent("No account is registered using this email address");
assertElementNotPresent("//div[@id='xwikicontent']//strong[@value='" + user + "']");
// XWIKI-4920 test that the email is properly escaped
open("XWiki", "ForgotUsername");
setFieldValue("e", "a' synta\\'x error");
submit("//input[@type='submit']"); // there are no other buttons
assertTextPresent("No account is registered using this email address");
assertTextNotPresent("Error");
}
/**
* Asserts that a menu item with the given label is present on the administration menu.
*/
public boolean isAdminMenuItemPresent(String label)
{
return isElementPresent("//*[contains(@class, 'admin-menu')]//a[. = '" + label + "']");
}
/*
* Fails if there is an administration icon for the named section.
* Must be in the administration app first.
* Tests: XWiki.ConfigurableClass
*/
public void assertConfigurationIconNotPresent(String section)
{
assertElementNotPresent("//div[contains(@class,'admin-menu')]//li[contains(@href,'section=" + section + "')]");
}
/**
* Will fail unless it detects a configuration of the type created by createConfigurableApplication.<br/>
* Tests: XWiki.ConfigurableClass
*/
public void assertConfigurationPresent(String space, String page)
{
assertElementPresent("//div[@id='admin-page-content']/h2[@id='HSomeHeading']/span");
// Fields
String fullName = space + "." + page;
String form = "//div[@id='admin-page-content']/form[@action='/xwiki/bin/save/" + space + "/" + page + "']";
assertElementPresent(form + "/fieldset/dl/dt[1]/label");
assertElementPresent(form + "/fieldset/dl/dd[1]/input[@name='" + fullName + "_0_String']");
assertElementPresent(form + "/fieldset/dl/dt[2]/label");
assertElementPresent(form + "/fieldset/dl/dd[2]/select[@name='" + fullName + "_0_Boolean']");
assertElementPresent(form + "/fieldset/dl/dt[3]/label");
assertElementPresent(form + "/fieldset/dl/dd[3]/textarea[@name='" + fullName + "_0_TextArea']");
assertElementPresent(form + "/fieldset/dl/dt[4]/label");
assertElementPresent(form + "/fieldset/dl/dd[4]/select[@name='" + fullName + "_0_Select']");
assertElementPresent(form + "/fieldset/input[@id='" + fullName + "_redirect']");
assertElementPresent(form + "/fieldset/input[@value='" + getSelenium().getLocation() + "'][@name='xredirect']");
// JavaScript injects a save button outside of the form and removes the default save button.
waitForElement("//*[@class = 'admin-buttons']//input[@type = 'submit' and @value = 'Save']");
}
/*
* Will fail if it detects a configuration of the type created by createConfigurableApplication.
* Tests: XWiki.ConfigurableClass
*/
public void assertConfigurationNotPresent(String space, String page)
{
assertElementNotPresent("//div[@id='admin-page-content']/h1[@id='HCustomize" + space + "." + page + ":']/span");
assertElementNotPresent("//div[@id='admin-page-content']/h2[@id='HSomeHeading']/span");
assertConfigurationNotEditable(space, page);
}
public void assertConfigurationNotEditable(String space, String page)
{
assertElementNotPresent("//div[@id='admin-page-content']/form[@action='/xwiki/bin/save/"
+ space + "/" + page + "']");
}
/**
     * Creates a new page with a configuration class with some simple fields,<br/>
     * then adds an object of XWiki.ConfigurableClass and one of its own class.<br/>
* Tests: XWiki.ConfigurableClass
*/
public void createConfigurableApplication(String space, String page, String section, boolean global)
{
// We have to use an existing space because the copy page form doesn't allow entering a new space.
String storageSpace = "Sandbox";
String storagePage = "CreateConfigurableApplication";
if (!tryToCopyPage(storageSpace, storagePage, space, page)) {
// Create the page with a simple configuration class.
createPage(space, page, "Test configurable application.", "xwiki/2.0");
open(space, page, "edit", "editor=class");
setFieldValue("propname", "String");
setFieldValue("proptype", "com.xpn.xwiki.objects.classes.StringClass");
clickButtonAndContinue("//input[@name='action_propadd']");
setFieldValue("propname", "Boolean");
setFieldValue("proptype", "com.xpn.xwiki.objects.classes.BooleanClass");
clickButtonAndContinue("//input[@name='action_propadd']");
setFieldValue("propname", "TextArea");
setFieldValue("proptype", "com.xpn.xwiki.objects.classes.TextAreaClass");
clickButtonAndContinue("//input[@name='action_propadd']");
setFieldValue("propname", "Select");
setFieldValue("proptype", "com.xpn.xwiki.objects.classes.StaticListClass");
clickButtonAndContinue("//input[@name='action_propadd']");
// Go to the object section.
open(space, page, "edit", "editor=object");
// Add a configurable object which points to the new class as the configuration class.
setFieldValue("classname", "XWiki.ConfigurableClass");
clickButtonAndContinue("//input[@name='action_objectadd']");
clickEditSaveAndView();
// Try to place it in the storage area.
tryToCopyPage(space, page, storageSpace, storagePage);
}
// Go to the object section.
open(space, page, "edit", "editor=object");
// Add an object of the new class.
waitForElement("classname");
setFieldValue("classname", space + "." + page);
clickButtonAndContinue("//input[@name='action_objectadd']");
setFieldValue("XWiki.ConfigurableClass_0_displayInSection", section);
setFieldValue("XWiki.ConfigurableClass_0_heading", "Some Heading");
setFieldValue("XWiki.ConfigurableClass_0_configurationClass", space + "." + page);
if (global == true) {
getSelenium().check("XWiki.ConfigurableClass_0_configureGlobally");
} else {
getSelenium().uncheck("XWiki.ConfigurableClass_0_configureGlobally");
}
// We won't set linkPrefix, propertiesToShow, codeToExecute, or iconAttachment.
clickEditSaveAndView();
}
/**
* This is used by createConfigurableApplication to store a copy of the default configurable to speed up making
* them.
*/
public boolean tryToCopyPage(String fromSpace, String fromPage, String toSpace, String toPage)
{
open(fromSpace, fromPage);
if (!isExistingPage()) {
return false;
}
return copyPage(fromSpace, fromPage, toSpace, toPage);
}
}
| xwiki-enterprise-test/xwiki-enterprise-test-selenium/src/test/it/org/xwiki/test/selenium/AdministrationTest.java | /*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.test.selenium;
import junit.framework.Test;
import org.xwiki.test.selenium.framework.AbstractXWikiTestCase;
import org.xwiki.test.selenium.framework.ColibriSkinExecutor;
import org.xwiki.test.selenium.framework.XWikiTestSuite;
/**
* Verify the overall Administration application features.
*
* @version $Id$
*/
public class AdministrationTest extends AbstractXWikiTestCase
{
public static Test suite()
{
XWikiTestSuite suite = new XWikiTestSuite("Verify the Administration application features.");
suite.addTestSuite(AdministrationTest.class, ColibriSkinExecutor.class);
return suite;
}
@Override
public void setUp() throws Exception
{
super.setUp();
loginAsAdmin();
}
/**
     * This method performs the following tests:
*
* <ul>
* <li>Login as global admin.</li>
* <li>Validate presence of default sections for global and space sections.</li>
* <li>Validate presence of application administration sections at global level only.</li>
* </ul>
*/
public void testGlobalAndSpaceSections()
{
clickLinkWithText("Administer Wiki");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Editing')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Localization')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Email')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Presentation')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Elements')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Registration')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Users')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Groups')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Rights')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Registration')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Import')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Export')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Templates')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=MessageStream')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Panels.PanelWizard')]");
// select space administration
getSelenium().select("goto-select", "label=Main");
waitPage();
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Editing')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Localization')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Email')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Presentation')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Elements')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Registration')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Users')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Groups')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Rights')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Registration')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Import')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Export')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Templates')]");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=MessageStream')]");
assertElementPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Panels.PanelWizard')]");
}
/*
     * Test to see that an application page is included only if that application exists
*/
public void testApplicationSection()
{
        // Delete the XWiki.SearchAdmin page and test it's not present in the admin global menu anymore
deletePage("XWiki", "SearchAdmin");
clickLinkWithText("Administer Wiki");
assertElementNotPresent("//*[contains(@class, 'admin-menu')]//a[contains(@href, 'section=Search')]");
restorePage("XWiki", "SearchAdmin");
}
/*
* Test modifying XWiki.XWikiPreferences multi-language field and save it.
*/
public void testSettingXWikiPreferences()
{
clickLinkWithText("Administer Wiki");
getSelenium().select("goto-select", "label=Wiki administration");
clickLinkWithXPath("//a[text()='Localization']", true);
getSelenium().select("//select[@name='XWiki.XWikiPreferences_0_multilingual']", "label=Yes");
clickLinkWithXPath("//input[@value='Save']", true);
assertElementPresent("//span[@id='headerlanguages']");
}
/*
* Test adding a new category in Blog Categories
*/
/* Disabled until the new blog can insert its own administration page.
public void testBlogAdmin()
{
open("XWiki", "XWikiPreferences", "admin");
// select global administration
clickLinkWithLocator("//span[text()='General']", true);
getSelenium().select("//select[@id='XWiki.XWikiPreferences_0_editor']", "label=Text");
clickLinkWithLocator("//input[@value='Save']");
assertElementPresent("//span[@id='showsectionswrapper']");
clickLinkWithXPath("//a[@id='showsections']", false);
assertElementPresent("//span[@id='hidesectionswrapper']");
clickLinkWithLocator("//span[text()='Blog categories']");
setFieldValue("name", "New Category");
setFieldValue("description", "New Category Content");
clickLinkWithLocator("//input[@value='Add']", true);
assertTextPresent("New Category");
}*/
/*
* Test Panel Wizard
*/
public void testPanelsAdmin()
{
open("XWiki", "XWikiPreferences", "admin");
// test panel wizard at global level
clickLinkWithLocator("//a[text()='Panel Wizard']");
waitForCondition("selenium.page().bodyText().indexOf('Panel List')!=-1;");
clickLinkWithXPath("//a[@href='#PageLayoutSection']", false);
waitForCondition("selenium.isElementPresent(\"//div[@id='bothcolumns']\")!=false;");
clickLinkWithXPath("//div[@id='bothcolumns']", false);
waitForCondition("selenium.page().bodyText().indexOf('Page Layout')!=-1;");
clickLinkWithXPath("//a[@href='#PanelListSection']", false);
getSelenium().dragAndDropToObject("//div[@class='panel expanded QuickLinks']", "//div[@id='leftPanels']");
clickLinkWithXPath("//a[text()='Save the new layout']", false);
waitForCondition("selenium.isAlertPresent()");
assertEquals("The layout has been saved properly.", getSelenium().getAlert());
open("Main", "WebHome");
assertElementPresent("leftPanels");
assertElementPresent("rightPanels");
// Revert changes
open("XWiki", "XWikiPreferences", "admin");
clickLinkWithLocator("//a[text()='Panel Wizard']");
waitForCondition("selenium.page().bodyText().indexOf('Page Layout')!=-1;");
clickLinkWithXPath("//a[@href='#PageLayoutSection']", false);
waitForCondition("selenium.isElementPresent(\"//div[@id='rightcolumn']\")!=false;");
clickLinkWithXPath("//div[@id='rightcolumn']", false);
clickLinkWithXPath("//a[text()='Save the new layout']", false);
waitForCondition("selenium.isAlertPresent()");
assertEquals("The layout has been saved properly.", getSelenium().getAlert());
open("Main", "WebHome");
assertElementNotPresent("leftPanels");
assertElementPresent("rightPanels");
// test panel wizard at space level
open("TestPanelsAdmin", "WebHome", "edit", "editor=wiki");
setFieldValue("content", "aaa");
clickEditSaveAndView();
open("TestPanelsAdmin", "WebPreferences", "admin");
clickLinkWithLocator("//a[text()='Panel Wizard']");
waitForCondition("selenium.page().bodyText().indexOf('Page Layout')!=-1;");
clickLinkWithXPath("//a[@href='#PageLayoutSection']", false);
waitForCondition("selenium.isElementPresent(\"//div[@id='leftcolumn']\")!=false;");
clickLinkWithXPath("//div[@id='leftcolumn']", false);
waitForCondition("selenium.page().bodyText().indexOf('Panel List')!=-1;");
clickLinkWithXPath("//a[@href='#PanelListSection']", false);
getSelenium().dragAndDropToObject("//div[@class='panel expanded QuickLinks']", "//div[@id='leftPanels']");
clickLinkWithXPath("//a[text()='Save the new layout']", false);
waitForCondition("selenium.isAlertPresent()");
assertEquals("The layout has been saved properly.", getSelenium().getAlert());
open("TestPanelsAdmin", "WebHome");
assertElementPresent("leftPanels");
assertElementPresent("//div[@class='panel expanded QuickLinks']");
open("XWiki", "WebHome");
assertElementNotPresent("leftPanels");
assertElementNotPresent("//div[@class='panel expanded QuickLinks']");
}
/*
* Test add configurable application to existing section.
*
* This test depends on the "Presentation" section existing.
* Tests: XWiki.ConfigurableClass
*/
public void testAddConfigurableApplicationInExistingSection()
{
// Create the configurable for global admin.
createConfigurableApplication("Main", "TestConfigurable", "Presentation", true);
// Check it's available in global section.
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=Presentation");
assertConfigurationPresent("Main", "TestConfigurable");
// Check it's not available in space section.
open("Main", "WebPreferences", "admin", "editor=spaceadmin§ion=Presentation");
assertConfigurationNotPresent("Main", "TestConfigurable");
// Switch application to non-global
open("Main", "TestConfigurable", "edit", "editor=object");
getSelenium().uncheck("XWiki.ConfigurableClass_0_configureGlobally");
clickEditSaveAndView();
// Check that it is available in space section.
open("Main", "WebPreferences", "admin", "editor=spaceadmin§ion=Presentation");
assertConfigurationPresent("Main", "TestConfigurable");
// Check that it's not available in another space.
open("XWiki", "WebPreferences", "admin", "editor=spaceadmin§ion=Presentation");
assertConfigurationNotPresent("Main", "TestConfigurable");
// Check that it's not available in global section.
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=Presentation");
assertConfigurationNotPresent("Main", "TestConfigurable");
}
/**
* Test add configurable application to a nonexistent section.
* <p>
* This test depends on the "HopingThereIsNoSectionByThisName" section not existing.<br/>
* Tests: XWiki.ConfigurableClass
*/
public void testAddConfigurableApplicationInNonexistantSection()
{
String section = "HopingThereIsNoSectionByThisName";
// Create the configurable for global admin.
createConfigurableApplication("Main", "TestConfigurable", section, true);
// Check it's available in global section.
clickLinkWithText("Administer Wiki");
assertTrue(isAdminMenuItemPresent(section));
clickLinkWithText(section);
assertConfigurationPresent("Main", "TestConfigurable");
// Check that it's not available in space section.
open("Main", "WebPreferences", "admin");
// Assert there is no menu item in the administration menu for our configurable application.
assertFalse(isAdminMenuItemPresent(section));
}
/**
* Fails if a user can create a Configurable application without having edit access to the configuration page (in
* this case: XWikiPreferences)
* <p>
* Tests: XWiki.ConfigurableClass
*/
public void testConfigurableCreatedByUnauthorizedWillNotExecute()
{
// Make sure the configurable page doesn't exist because otherwise we may fail to overwrite it with a
// non-administrator user.
deletePage("Main", "TestConfigurable");
// Create the configurable for global administrator.
loginAndRegisterUser("anotherJoker", "bentOnMalice", false);
String nonExistingSection = "HopingThereIsNoSectionByThisName";
createConfigurableApplication("Main", "TestConfigurable", nonExistingSection, true);
loginAsAdmin();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=" + nonExistingSection);
assertConfigurationNotEditable("Main", "TestConfigurable");
}
/*
* Creates a document with 2 configurable objects, one gets configured globally in one section and displays
* 2 configuration fields, the other is configured in the space in another section and displays the other 2
* fields. Fails if they are not displayed as they should be.
*
* Tests: XWiki.ConfigurableClass
*/
public void testApplicationConfiguredInMultipleSections()
{
String space = "Main";
String page = "TestConfigurable";
createConfigurableApplication(space, page, "TestSection1", true);
open(space, page, "edit", "editor=object");
// Add a second configurable object.
setFieldValue("classname", "XWiki.ConfigurableClass");
clickButtonAndContinue("//input[@name='action_objectadd']");
setFieldValue("XWiki.ConfigurableClass_1_displayInSection", "TestSection2");
setFieldValue("XWiki.ConfigurableClass_1_heading", "Some Other Heading");
setFieldValue("XWiki.ConfigurableClass_1_configurationClass", space + "." + page);
getSelenium().uncheck("XWiki.ConfigurableClass_1_configureGlobally");
// Set propertiesToShow so that each config only shows half of the properties.
setFieldValue("XWiki.ConfigurableClass_1_propertiesToShow", "TextArea, Select");
setFieldValue("XWiki.ConfigurableClass_0_propertiesToShow", "String, Boolean");
clickEditSaveAndView();
// Assert that half of the configuration shows up but not the other half.
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection1");
assertElementPresent("//div[@id='admin-page-content']/h2[@id='HSomeHeading']/span");
// Fields
String fullName = space + "." + page;
String form = "//div[@id='admin-page-content']/form[@action='/xwiki/bin/save/" + space + "/" + page + "']";
assertElementPresent(form + "/fieldset//label['String']");
assertElementPresent(form + "/fieldset//input[@name='" + fullName + "_0_String']");
assertElementPresent(form + "/fieldset//label['Boolean']");
assertElementPresent(form + "/fieldset//select[@name='" + fullName + "_0_Boolean']");
assertElementPresent(form + "/fieldset/input[@id='" + fullName + "_redirect']");
// xredirect
assertElementPresent(form + "/fieldset/input[@value='" + getSelenium().getLocation() + "'][@name='xredirect']");
// Save button
// assertElementPresent(form + "/div/p/span/input[@type='submit']");
// Javascript injects a save button outside of the form and removes the default save button.
waitForElement("//div/div/p/span/input[@type='submit'][@value='Save']");
// Should not be here
assertElementNotPresent(form + "/fieldset//textarea[@name='" + fullName + "_0_TextArea']");
assertElementNotPresent(form + "/fieldset//select[@name='" + fullName + "_0_Select']");
// Now we go to where the other half of the configuration should be.
open("Main", "WebPreferences", "admin", "editor=spaceadmin§ion=TestSection2");
assertElementPresent("//h2[@id='HSomeOtherHeading']/span");
// Fields
assertElementPresent(form + "/fieldset//label");
assertElementPresent(form + "/fieldset//textarea[@name='" + fullName + "_0_TextArea']");
assertElementPresent(form + "/fieldset//select[@name='" + fullName + "_0_Select']");
assertElementPresent(form + "/fieldset/input[@id='" + fullName + "_redirect']");
// xredirect
assertElementPresent(form + "/fieldset/input[@value='" + getSelenium().getLocation() + "'][@name='xredirect']");
// Save button
// assertElementPresent(form + "/div/p/span/input[@type='submit']");
// Javascript injects a save button outside of the form and removes the default save button.
waitForElement("//div/div/p/span/input[@type='submit'][@value='Save']");
// Should not be here
assertElementNotPresent(form + "/fieldset//input[@name='" + fullName + "_0_String']");
assertElementNotPresent(form + "/fieldset//select[@name='" + fullName + "_0_Boolean']");
}
/*
* Make sure html macros and pre tags are not being stripped
* @see: http://jira.xwiki.org/jira/browse/XAADMINISTRATION-141
*
* Tests: XWiki.ConfigurableClass
*/
public void testNotStrippingHtmlMacros()
{
String space = "Main";
String page = "TestConfigurable";
String test = "{{html}} <pre> {{html clean=\"false\"}} </pre> {{/html}}";
String fullName = space + "." + page;
String form = "//div[@id='admin-page-content']/form[@action='/xwiki/bin/save/" + space + "/" + page + "']";
createConfigurableApplication(space, page, "TestSection1", true);
open(space, page, "edit", "editor=object");
setFieldValue(fullName + "_0_TextArea", test);
setFieldValue(fullName + "_0_String", test);
clickEditSaveAndView();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection1");
waitForTextPresent(form + "/fieldset//textarea[@name='" + fullName + "_0_TextArea']", test);
// Getting content from an input field required getValue and not getText
assertTrue(getSelenium().getValue(form + "/fieldset//input[@name='" + fullName + "_0_String']").equals(test));
}
/*
* If a value is specified for linkPrefix, then a link is generated with linkPrefix + prettyName of the property from
* the configuration class.
* linkPrefix = "http://www.xwiki.org/bin/view/Main/"
* property prettyName = "WebHome"
* generated link should equal "http://www.xwiki.org/bin/view/Main/WebHome"
*
* Tests: XWiki.ConfigurableClass
*/
public void testLabelLinkGeneration()
{
String space = "Main";
String page = "TestConfigurable";
createConfigurableApplication(space, page, "TestSection3", true);
open(space, page, "edit", "editor=object");
setFieldValue("XWiki.ConfigurableClass_0_linkPrefix", "TheLinkPrefix");
clickEditSaveAndView();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection3");
assertElementPresent("//form/fieldset//a[@href='TheLinkPrefixString']");
assertElementPresent("//form/fieldset//a[@href='TheLinkPrefixBoolean']");
assertElementPresent("//form/fieldset//a[@href='TheLinkPrefixTextArea']");
assertElementPresent("//form/fieldset//a[@href='TheLinkPrefixSelect']");
}
/*
* Fails unless XWiki.ConfigurableClass locks each page on view and unlocks any other configurable page.
* Also fails if codeToExecute is not being evaluated.
*
* Tests: XWiki.ConfigurableClass
*/
public void testLockingAndUnlocking()
{
String space = "Main";
String page1 = "TestConfigurable";
String page2 = "TestConfigurable2";
String isThisPageLocked = "{{velocity}}Is This Page Locked $doc.getLocked(){{/velocity}}";
createConfigurableApplication(space, page1, "TestSection4", true);
createConfigurableApplication(space, page2, "TestSection5", true);
open(space, page1, "edit", "editor=wiki");
setFieldValue("content", isThisPageLocked);
clickEditSaveAndView();
open(space, page2, "edit", "editor=wiki");
setFieldValue("content", isThisPageLocked);
clickEditSaveAndView();
// Now we go to the documents and see which is locked.
// Clear any locks by visiting the main page.
open("XWiki", "XWikiPreferences", "admin");
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection4");
// We have to switch user context without logging out, logging out removes all locks.
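// Opening the page through 127.0.0.1 instead of localhost drops the session cookie, so the page is viewed in a separate, unauthenticated session.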
open(space, page1, "view");
open(getSelenium().getLocation().replaceAll("http://localhost", "http://127.0.0.1"));
assertTextPresent("Is This Page Locked true");
open(space, page2, "view");
open(getSelenium().getLocation().replaceAll("http://localhost", "http://127.0.0.1"));
assertTextNotPresent("Is This Page Locked true");
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection5");
open(space, page1, "view");
open(getSelenium().getLocation().replaceAll("http://localhost", "http://127.0.0.1"));
assertTextNotPresent("Is This Page Locked true");
open(space, page2, "view");
open(getSelenium().getLocation().replaceAll("http://localhost", "http://127.0.0.1"));
assertTextPresent("Is This Page Locked true");
}
/*
* If CodeToExecute is defined in a configurable app, then it should be evaluated.
* Also, the heading should be evaluated and not just printed.
* If XWiki.ConfigurableClass is saved with programming rights, it should resave itself so that it doesn't have them.
*/
public void testCodeToExecutionAndAutoSandboxing()
{
String space = "Main";
String page = "TestConfigurable";
// Note: We are forced to use the silent notation because Selenium 2.20.0 doesn't escape properly the string
// passed to the Selenium.type() method and it seems ${...} has a special meaning, throwing an exception with
// the message "replacement is undefined". Escaping the value using backslash or doubling the { didn't work.
// See http://code.google.com/p/selenium/issues/detail?id=3510 .
String codeToExecute = "#set($code = 's sh')"
+ "Thi$!{code}ould be displayed."
+ "#if($xcontext.hasProgrammingRights())"
+ "This should not be displayed."
+ "#end";
String heading = "#set($code = 'his sho')"
+ "T$!{code}uld also be displayed.";
createConfigurableApplication(space, page, "TestSection6", true);
open(space, page, "edit", "editor=object");
setFieldValue("XWiki.ConfigurableClass_0_codeToExecute", codeToExecute);
setFieldValue("XWiki.ConfigurableClass_0_heading", heading);
setFieldValue("XWiki.ConfigurableClass_0_configurationClass", "");
clickEditSaveAndView();
// Our admin will foolishly save XWiki.ConfigurableClass, giving it programming rights.
open("XWiki", "ConfigurableClass", "edit", "editor=wiki");
// Since we modify ConfigurableClass, we must modify it back after to prevent polluting further tests.
// See the previous note about silent notation to understand why we perform a string replacement.
String originalContent = getFieldValue("content").replace("${", "$!{");
try {
setFieldValue("content", originalContent
+ "{{velocity}}Has Programming permission: $xcontext.hasProgrammingRights(){{/velocity}}");
clickEditSaveAndContinue();
// Now we look at the section for our configurable.
open("XWiki", "ConfigurableClass", "view", "editor=globaladmin§ion=TestSection6");
assertTextPresent("This should be displayed.");
assertTextPresent("This should also be displayed.");
assertTextNotPresent("This should not be displayed.");
assertTextPresent("Has Programming permission: false");
// Make sure javascript has not added a Save button.
assertElementNotPresent("//div/div/p/span/input[@type='submit'][@value='Save']");
} finally {
open("XWiki", "ConfigurableClass", "edit", "editor=wiki");
setFieldValue("content", originalContent);
clickEditSaveAndContinue();
}
}
/*
* Proves that ConfigurableClass#codeToExecute is not rendered inline even if there is no
* custom configuration class and the only content is custom content.
* Tests: XWiki.ConfigurableClass
*/
public void testCodeToExecuteNotInlineIfNoConfigurationClass()
{
String space = "Main";
String page = "TestConfigurable";
String test = "{{html}} <div> <p> hello </p> </div> {{/html}}";
open(space, page, "delete", "confirm=1");
createConfigurableApplication(space, page, "TestSection1", true);
open(space, page, "edit", "editor=object");
setFieldValue("XWiki.ConfigurableClass_0_configurationClass", "");
setFieldValue("XWiki.ConfigurableClass_0_codeToExecute", test);
clickEditSaveAndView();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection1");
assertElementNotPresent("//span[@class='xwikirenderingerror']");
}
/*
* Proves that ConfigurableClass#codeToExecute is not rendered inline whether it's at the top of the
* form or inside of the form.
* Tests: XWiki.ConfigurableClass
*/
public void testCodeToExecuteNotInline()
{
String space = "Main";
String page = "TestConfigurable";
String test = "{{html}} <div> <p> hello </p> </div> {{/html}}";
createConfigurableApplication(space, page, "TestSection1", true);
open(space, page, "edit", "editor=object");
setFieldValue("classname", "XWiki.ConfigurableClass");
clickButtonAndContinue("//input[@name='action_objectadd']");
setFieldValue("XWiki.ConfigurableClass_0_codeToExecute", test);
setFieldValue("XWiki.ConfigurableClass_0_propertiesToShow", "String, Boolean");
setFieldValue("XWiki.ConfigurableClass_1_displayInSection", "TestSection1");
setFieldValue("XWiki.ConfigurableClass_1_configurationClass", space + "." + page);
setFieldValue("XWiki.ConfigurableClass_1_propertiesToShow", "TextArea, Select");
setFieldValue("XWiki.ConfigurableClass_1_codeToExecute", test);
getSelenium().check("XWiki.ConfigurableClass_1_configureGlobally");
clickEditSaveAndView();
open("XWiki", "XWikiPreferences", "admin", "editor=globaladmin§ion=TestSection1");
assertElementNotPresent("//span[@class='xwikirenderingerror']");
}
/**
* Test functionality of the ForgotUsername page:
* <ul>
* <li>A user can be found using correct email</li>
* <li>No user is found using wrong email</li>
* <li>Email text is properly escaped</li>
* </ul>
*/
public void testForgotUsername()
{
String space = "Test";
String page = "SQLTestPage";
String mail = "[email protected]"; // default Admin mail
String user = "Admin";
String badMail = "[email protected]";
// Ensure there is a page we will try to find using HQL injection
editInWikiEditor(space, page);
setFieldValue("title", page);
setFieldValue("content", page);
clickEditSaveAndView();
// test that it finds the correct user
open("XWiki", "ForgotUsername");
setFieldValue("e", mail);
submit("//input[@type='submit']"); // there are no other buttons
assertTextNotPresent("No account is registered using this email address");
assertElementPresent("//div[@id='xwikicontent']//strong[text()='" + user + "']");
// test that bad mail results in no results
open("XWiki", "ForgotUsername");
setFieldValue("e", badMail);
submit("//input[@type='submit']"); // there are no other buttons
assertTextPresent("No account is registered using this email address");
assertElementNotPresent("//div[@id='xwikicontent']//strong[@value='" + user + "']");
// XWIKI-4920 test that the email is properly escaped
open("XWiki", "ForgotUsername");
setFieldValue("e", "a' synta\\'x error");
submit("//input[@type='submit']"); // there are no other buttons
assertTextPresent("No account is registered using this email address");
assertTextNotPresent("Error");
}
/**
* Checks whether a menu item with the given label is present in the administration menu.
*/
public boolean isAdminMenuItemPresent(String label)
{
return isElementPresent("//*[contains(@class, 'admin-menu')]//a[. = '" + label + "']");
}
/*
* Fails if there is an administration icon for the named section.
* Must be in the administration app first.
* Tests: XWiki.ConfigurableClass
*/
public void assertConfigurationIconNotPresent(String section)
{
assertElementNotPresent("//div[contains(@class,'admin-menu')]//li[contains(@href,'section=" + section + "')]");
}
/**
* Will fail unless it detects a configuration of the type created by createConfigurableApplication.<br/>
* Tests: XWiki.ConfigurableClass
*/
public void assertConfigurationPresent(String space, String page)
{
assertElementPresent("//div[@id='admin-page-content']/h2[@id='HSomeHeading']/span");
// Fields
String fullName = space + "." + page;
String form = "//div[@id='admin-page-content']/form[@action='/xwiki/bin/save/" + space + "/" + page + "']";
assertElementPresent(form + "/fieldset/dl/dt[1]/label");
assertElementPresent(form + "/fieldset/dl/dd[1]/input[@name='" + fullName + "_0_String']");
assertElementPresent(form + "/fieldset/dl/dt[2]/label");
assertElementPresent(form + "/fieldset/dl/dd[2]/select[@name='" + fullName + "_0_Boolean']");
assertElementPresent(form + "/fieldset/dl/dt[3]/label");
assertElementPresent(form + "/fieldset/dl/dd[3]/textarea[@name='" + fullName + "_0_TextArea']");
assertElementPresent(form + "/fieldset/dl/dt[4]/label");
assertElementPresent(form + "/fieldset/dl/dd[4]/select[@name='" + fullName + "_0_Select']");
assertElementPresent(form + "/fieldset/input[@id='" + fullName + "_redirect']");
assertElementPresent(form + "/fieldset/input[@value='" + getSelenium().getLocation() + "'][@name='xredirect']");
// JavaScript injects a save button outside of the form and removes the default save button.
waitForElement("//*[@class = 'admin-buttons']//input[@type = 'submit' and @value = 'Save']");
}
/*
* Will fail if it detects a configuration of the type created by createConfigurableApplication.
* Tests: XWiki.ConfigurableClass
*/
public void assertConfigurationNotPresent(String space, String page)
{
assertElementNotPresent("//div[@id='admin-page-content']/h1[@id='HCustomize" + space + "." + page + ":']/span");
assertElementNotPresent("//div[@id='admin-page-content']/h2[@id='HSomeHeading']/span");
assertConfigurationNotEditable(space, page);
}
public void assertConfigurationNotEditable(String space, String page)
{
assertElementNotPresent("//div[@id='admin-page-content']/form[@action='/xwiki/bin/save/"
+ space + "/" + page + "']");
}
/**
* Creates a new page with a configuration class with some simple fields<br/>
* then adds an object of XWiki.ConfigurableClass and one of its own class.<br/>
* Tests: XWiki.ConfigurableClass
*/
public void createConfigurableApplication(String space, String page, String section, boolean global)
{
// We have to use an existing space because the copy page form doesn't allow entering a new space.
String storageSpace = "Sandbox";
String storagePage = "CreateConfigurableApplication";
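// Reuse a previously stored copy of the configurable page if one exists; otherwise build it from scratch below.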
if (!tryToCopyPage(storageSpace, storagePage, space, page)) {
// Create the page with a simple configuration class.
createPage(space, page, "Test configurable application.", "xwiki/2.0");
open(space, page, "edit", "editor=class");
setFieldValue("propname", "String");
setFieldValue("proptype", "com.xpn.xwiki.objects.classes.StringClass");
clickButtonAndContinue("//input[@name='action_propadd']");
setFieldValue("propname", "Boolean");
setFieldValue("proptype", "com.xpn.xwiki.objects.classes.BooleanClass");
clickButtonAndContinue("//input[@name='action_propadd']");
setFieldValue("propname", "TextArea");
setFieldValue("proptype", "com.xpn.xwiki.objects.classes.TextAreaClass");
clickButtonAndContinue("//input[@name='action_propadd']");
setFieldValue("propname", "Select");
setFieldValue("proptype", "com.xpn.xwiki.objects.classes.StaticListClass");
clickButtonAndContinue("//input[@name='action_propadd']");
// Go to the object section.
open(space, page, "edit", "editor=object");
// Add a configurable object which points to the new class as the configuration class.
setFieldValue("classname", "XWiki.ConfigurableClass");
clickButtonAndContinue("//input[@name='action_objectadd']");
clickEditSaveAndView();
// Try to place it in the storage area.
tryToCopyPage(space, page, storageSpace, storagePage);
}
// Go to the object section.
open(space, page, "edit", "editor=object");
// Add an object of the new class.
setFieldValue("classname", space + "." + page);
clickButtonAndContinue("//input[@name='action_objectadd']");
setFieldValue("XWiki.ConfigurableClass_0_displayInSection", section);
setFieldValue("XWiki.ConfigurableClass_0_heading", "Some Heading");
setFieldValue("XWiki.ConfigurableClass_0_configurationClass", space + "." + page);
if (global == true) {
getSelenium().check("XWiki.ConfigurableClass_0_configureGlobally");
} else {
getSelenium().uncheck("XWiki.ConfigurableClass_0_configureGlobally");
}
// We won't set linkPrefix, propertiesToShow, codeToExecute, or iconAttachment.
clickEditSaveAndView();
}
/**
* This is used by createConfigurableApplication to store a copy of the default configurable to speed up making
* them.
*/
public boolean tryToCopyPage(String fromSpace, String fromPage, String toSpace, String toPage)
{
open(fromSpace, fromPage);
if (!isExistingPage()) {
return false;
}
return copyPage(fromSpace, fromPage, toSpace, toPage);
}
}
| [misc] fixed test which was failing because the classname element couldn't be found.
| xwiki-enterprise-test/xwiki-enterprise-test-selenium/src/test/it/org/xwiki/test/selenium/AdministrationTest.java | [misc] fixed test which was failing because the classname element couldn't be found. |
|
Java | apache-2.0 | bcbba2a17dd0c092b00583972a735d23a851368d | 0 | AxonFramework/AxonFramework | /*
* Copyright (c) 2010-2018. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.eventsourcing;
import org.axonframework.eventhandling.DomainEventMessage;
import org.axonframework.eventhandling.EventMessage;
import org.axonframework.messaging.unitofwork.CurrentUnitOfWork;
import java.io.Serializable;
/**
* Snapshotter trigger mechanism that counts the number of events to decide when to create a snapshot. A snapshot is
* triggered when the number of events applied on an aggregate exceeds the given threshold.
*
* @author Allard Buijze
* @since 3.0
*/
public class EventCountSnapshotTriggerDefinition implements SnapshotTriggerDefinition {
private final Snapshotter snapshotter;
private final int threshold;
/**
* Initializes the SnapshotTriggerDefinition to trigger snapshots using the given {@code snapshotter}
* when {@code threshold} events have been applied to an Aggregate instance
*
* @param snapshotter the snapshotter to notify when a snapshot needs to be taken
* @param threshold the number of events that will trigger the creation of a snapshot event
*/
public EventCountSnapshotTriggerDefinition(Snapshotter snapshotter, int threshold) {
this.snapshotter = snapshotter;
this.threshold = threshold;
}
@Override
public SnapshotTrigger prepareTrigger(Class<?> aggregateType) {
return new EventCountSnapshotTrigger(snapshotter, aggregateType, threshold);
}
@Override
public SnapshotTrigger reconfigure(Class<?> aggregateType, SnapshotTrigger trigger) {
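// The snapshotter reference is transient; reattach it to a deserialized trigger so its event count is preserved.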
if (trigger instanceof EventCountSnapshotTrigger) {
((EventCountSnapshotTrigger) trigger).setSnapshotter(snapshotter);
return trigger;
}
return new EventCountSnapshotTrigger(snapshotter, aggregateType, threshold);
}
private static class EventCountSnapshotTrigger implements SnapshotTrigger, Serializable {
private final Class<?> aggregateType;
private final int threshold;
private transient Snapshotter snapshotter;
private int counter = 0;
public EventCountSnapshotTrigger(Snapshotter snapshotter, Class<?> aggregateType, int threshold) {
this.snapshotter = snapshotter;
this.aggregateType = aggregateType;
this.threshold = threshold;
}
@Override
public void eventHandled(EventMessage<?> msg) {
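// Only domain events count towards the threshold; with an active unit of work the snapshot is deferred until the commit is prepared.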
if (msg instanceof DomainEventMessage && ++counter >= threshold) {
if (CurrentUnitOfWork.isStarted()) {
CurrentUnitOfWork.get().onPrepareCommit(
u -> scheduleSnapshot((DomainEventMessage) msg));
} else {
scheduleSnapshot((DomainEventMessage) msg);
}
counter = 0;
}
}
protected void scheduleSnapshot(DomainEventMessage msg) {
snapshotter.scheduleSnapshot(aggregateType, msg.getAggregateIdentifier());
counter = 0;
}
@Override
public void initializationFinished() {
}
public void setSnapshotter(Snapshotter snapshotter) {
this.snapshotter = snapshotter;
}
}
}
| eventsourcing/src/main/java/org/axonframework/eventsourcing/EventCountSnapshotTriggerDefinition.java | /*
* Copyright (c) 2010-2018. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.eventsourcing;
import org.axonframework.eventhandling.DomainEventMessage;
import org.axonframework.eventhandling.EventMessage;
import org.axonframework.messaging.unitofwork.CurrentUnitOfWork;
import java.io.Serializable;
/**
* Snapshotter trigger mechanism that counts the number of events to decide when to create a snapshot. A snapshot is
* triggered when the number of events applied on an aggregate exceeds the given threshold.
*
* @author Allard Buijze
* @since 3.0
*/
public class EventCountSnapshotTriggerDefinition implements SnapshotTriggerDefinition {
private final Snapshotter snapshotter;
private final int threshold;
/**
* Initializes the SnapshotTriggerDefinition to trigger snapshots using the given {@code snapshotter}
* when {@code threshold} events have been applied to an Aggregate instance
*
* @param snapshotter the snapshotter to notify when a snapshot needs to be taken
* @param threshold the number of events that will trigger the creation of a snapshot event
*/
public EventCountSnapshotTriggerDefinition(Snapshotter snapshotter, int threshold) {
this.snapshotter = snapshotter;
this.threshold = threshold;
}
@Override
public SnapshotTrigger prepareTrigger(Class<?> aggregateType) {
return new EventCountSnapshotTrigger(snapshotter, aggregateType, threshold);
}
@Override
public SnapshotTrigger reconfigure(Class<?> aggregateType, SnapshotTrigger trigger) {
if (trigger instanceof EventCountSnapshotTrigger) {
((EventCountSnapshotTrigger) trigger).setSnapshotter(snapshotter);
return trigger;
}
return new EventCountSnapshotTrigger(snapshotter, aggregateType, threshold);
}
private static class EventCountSnapshotTrigger implements SnapshotTrigger, Serializable {
private final Class<?> aggregateType;
private final int threshold;
private transient Snapshotter snapshotter;
private int counter = 0;
public EventCountSnapshotTrigger(Snapshotter snapshotter, Class<?> aggregateType, int threshold) {
this.snapshotter = snapshotter;
this.aggregateType = aggregateType;
this.threshold = threshold;
}
@Override
public void eventHandled(EventMessage<?> msg) {
if (++counter >= threshold && msg instanceof DomainEventMessage) {
if (CurrentUnitOfWork.isStarted()) {
CurrentUnitOfWork.get().onPrepareCommit(
u -> scheduleSnapshot((DomainEventMessage) msg));
} else {
scheduleSnapshot((DomainEventMessage) msg);
}
counter = 0;
}
}
protected void scheduleSnapshot(DomainEventMessage msg) {
snapshotter.scheduleSnapshot(aggregateType, msg.getAggregateIdentifier());
counter = 0;
}
@Override
public void initializationFinished() {
}
public void setSnapshotter(Snapshotter snapshotter) {
this.snapshotter = snapshotter;
}
}
}
| As a snapshot trigger definition I want to count Domain Event Messages only.
| eventsourcing/src/main/java/org/axonframework/eventsourcing/EventCountSnapshotTriggerDefinition.java | As a snapshot trigger definition I want to count Domain Event Messages only. |
|
Java | apache-2.0 | b3c647e90987a3c95ec8899b941218bc9e16b750 | 0 | apache/portals-pluto,apache/portals-pluto,apache/portals-pluto | /*
* The Apache Software License, Version 1.1
*
* Copyright (c) 2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Pluto", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
/*
*/
package org.apache.pluto.core.impl;
import java.io.BufferedReader;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import javax.portlet.PortalContext;
import javax.portlet.PortletMode;
import javax.portlet.PortletPreferences;
import javax.portlet.PortletRequest;
import javax.portlet.PortletSession;
import javax.portlet.WindowState;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpSession;
import org.apache.pluto.core.InternalPortletRequest;
import org.apache.pluto.factory.PortletObjectAccess;
import org.apache.pluto.om.window.PortletWindow;
import org.apache.pluto.services.information.DynamicInformationProvider;
import org.apache.pluto.services.information.InformationProviderAccess;
import org.apache.pluto.services.property.PropertyManager;
import org.apache.pluto.util.Enumerator;
import org.apache.pluto.util.NamespaceMapperAccess;
public abstract class PortletRequestImpl extends javax.servlet.http.HttpServletRequestWrapper
implements PortletRequest, InternalPortletRequest
{
private PortletWindow portletWindow;
/**
* Holds the portlet session
*/
private PortletSession portletSession;
private DynamicInformationProvider provider;
/**
* true if the HTTP-Body has been accessed
*/
private boolean bodyAccessed;
/**
* true if we are in an include call
*/
private boolean included;
public PortletRequestImpl(PortletWindow portletWindow,
javax.servlet.http.HttpServletRequest servletRequest)
{
super(servletRequest);
this.portletWindow = portletWindow;
provider = InformationProviderAccess.getDynamicProvider(_getHttpServletRequest());
}
// javax.portlet.PortletRequest implementation ------------------------------------------------
public boolean isWindowStateAllowed(WindowState state)
{
return provider.isWindowStateAllowed(state);
}
public boolean isPortletModeAllowed(PortletMode portletMode)
{
// check if portal supports portlet mode
boolean supported = provider.isPortletModeAllowed(portletMode);
// check if portlet supports portlet mode as well
if (supported)
{
supported = PortletModeHelper.isPortletModeAllowedByPortlet(portletWindow,
this._getHttpServletRequest().getContentType(),
portletMode);
}
return supported;
}
public PortletMode getPortletMode()
{
return provider.getPortletMode(portletWindow);
}
public WindowState getWindowState()
{
return provider.getWindowState(portletWindow);
}
// needs to be implemented in each subclass
public abstract PortletPreferences getPreferences();
public PortletSession getPortletSession()
{
return getPortletSession(true);
}
public PortletSession getPortletSession(boolean create)
{
// check if the session was invalidated
javax.servlet.http.HttpSession httpSession = this._getHttpServletRequest().getSession(false);
if ((portletSession != null) && (httpSession == null))
{
portletSession = null;
}
else if (httpSession != null)
{
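// An underlying HTTP session already exists, so always wrap it in a portlet session.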
create = true;
}
if (create && portletSession == null)
{
httpSession = this._getHttpServletRequest().getSession(create);
if (httpSession != null)
{
portletSession = PortletObjectAccess.getPortletSession(portletWindow, httpSession);
}
}
return portletSession;
}
public String getProperty(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Attribute name == null");
}
// get properties from request header
String prop = this._getHttpServletRequest().getHeader(name);
if (prop == null)
{
// get properties from PropertyManager
Map map = PropertyManager.getRequestProperties(portletWindow, this._getHttpServletRequest());
if (map != null)
{
String[] properties = (String[]) map.get(name);
if ((properties != null) && (properties.length > 0))
{
prop = properties[0];
}
}
}
return prop;
}
public Enumeration getProperties(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Property name == null");
}
Set v = new HashSet();
// get properties from request header
Enumeration props = this._getHttpServletRequest().getHeaders(name);
if (props != null)
{
while (props.hasMoreElements())
{
v.add(props.nextElement());
}
}
// get properties from PropertyManager
Map map = PropertyManager.getRequestProperties(portletWindow, this._getHttpServletRequest());
if (map != null)
{
String[] properties = (String[]) map.get(name);
if (properties != null)
{
// add properties to vector
for (int i=0;i<properties.length;i++)
{
v.add(properties[i]);
}
}
}
return new Enumerator(v.iterator());
}
public Enumeration getPropertyNames()
{
Set v = new HashSet();
// get properties from PropertyManager
Map map = PropertyManager.getRequestProperties(portletWindow, this._getHttpServletRequest());
if (map != null)
{
Iterator propsIter = map.keySet().iterator();
while (propsIter.hasNext())
{
v.add(propsIter.next());
}
}
// get properties from request header
Enumeration props = this._getHttpServletRequest().getHeaderNames();
if (props != null)
{
while (props.hasMoreElements())
{
v.add(props.nextElement());
}
}
return new Enumerator(v.iterator());
}
public PortalContext getPortalContext()
{
return PortletObjectAccess.getPortalContext();
}
public String getAuthType()
{
return this._getHttpServletRequest().getAuthType();
}
public String getContextPath()
{
return portletWindow.getPortletEntity().getPortletDefinition().getPortletApplicationDefinition().getWebApplicationDefinition().getContextRoot();
// we cannot use that because of a bug in tomcat
// return this._getHttpServletRequest().getContextPath();
}
public String getRemoteUser()
{
return this._getHttpServletRequest().getRemoteUser();
}
public java.security.Principal getUserPrincipal()
{
return this._getHttpServletRequest().getUserPrincipal();
}
public boolean isUserInRole(String role)
{
return this._getHttpServletRequest().isUserInRole(role);
}
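/**
 * Attributes set through the portlet API are stored under a key namespaced with the portlet
 * window id; fall back to the plain name for attributes set directly by the container.
 */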
public Object getAttribute(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Attribute name == null");
}
Object attribute = this._getHttpServletRequest().getAttribute(
NamespaceMapperAccess.getNamespaceMapper().encode(portletWindow.getId(),name)
);
if (attribute==null)
{
// TBD, not sure, if this should be done for all attributes or only javax.servlet.
attribute = this._getHttpServletRequest().getAttribute(name);
}
return attribute;
}
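/**
 * Returns only the attribute names that belong to this portlet window's namespace, decoded
 * back to the names the portlet used when setting them.
 */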
public Enumeration getAttributeNames()
{
Enumeration attributes = this._getHttpServletRequest().getAttributeNames();
Vector portletAttributes = new Vector();
while (attributes.hasMoreElements())
{
String attribute = (String)attributes.nextElement();
String portletAttribute = NamespaceMapperAccess.getNamespaceMapper().decode(portletWindow.getId(),attribute);
if (portletAttribute!=null)
{ // it is in the portlet's namespace
portletAttributes.add(portletAttribute);
}
}
return portletAttributes.elements();
}
public String getParameter(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Parameter name == null");
}
bodyAccessed = true;
Map parameters = this._getHttpServletRequest().getParameterMap();
String[] values = (String[])parameters.get(name);
if (values != null)
{
return values[0];
}
return null;
}
public java.util.Enumeration getParameterNames()
{
bodyAccessed = true;
Map parameters = this._getHttpServletRequest().getParameterMap();
return Collections.enumeration(parameters.keySet());
}
public String[] getParameterValues(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Parameter name == null");
}
bodyAccessed = true;
Map parameters = this._getHttpServletRequest().getParameterMap();
return(String[])parameters.get(name);
}
public Map getParameterMap()
{
bodyAccessed = true;
return Collections.unmodifiableMap(this._getHttpServletRequest().getParameterMap());
}
public boolean isSecure()
{
return this._getHttpServletRequest().isSecure();
}
public void setAttribute(String name, Object o)
{
if (name == null)
{
throw new IllegalArgumentException("Attribute name == null");
}
if ( o == null)
{
this.removeAttribute(name);
}
else if (isNameReserved(name))
{
// Reserved names go directly in the underlying request
_getHttpServletRequest().setAttribute(name, o);
}
else
{
this._getHttpServletRequest().setAttribute(NamespaceMapperAccess.getNamespaceMapper().encode(portletWindow.getId(),name), o);
}
}
public void removeAttribute(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Attribute name == null");
}
if (isNameReserved(name))
{
// Reserved names go directly in the underlying request
_getHttpServletRequest().removeAttribute(name);
}
else
{
this._getHttpServletRequest().
removeAttribute(NamespaceMapperAccess.getNamespaceMapper().encode(portletWindow.getId(), name));
}
}
public String getRequestedSessionId()
{
return this._getHttpServletRequest().getRequestedSessionId();
}
public boolean isRequestedSessionIdValid()
{
return this._getHttpServletRequest().isRequestedSessionIdValid();
}
public String getResponseContentType()
{
// get the default response content type from the container
String responseContentType = provider.getResponseContentType();
return responseContentType;
}
public Enumeration getResponseContentTypes()
{
// get the default response content types from the container
Iterator responseContentTypes = provider.getResponseContentTypes();
return new Enumerator(responseContentTypes);
}
public java.util.Locale getLocale()
{
return this._getHttpServletRequest().getLocale();
}
public Enumeration getLocales()
{
return this._getHttpServletRequest().getLocales();
}
public String getScheme()
{
return this._getHttpServletRequest().getScheme();
}
public String getServerName()
{
return this._getHttpServletRequest().getServerName();
}
public int getServerPort()
{
return this._getHttpServletRequest().getServerPort();
}
// --------------------------------------------------------------------------------------------
// org.apache.pluto.core.InternalPortletRequest implementation --------------------------------
public void lateInit(javax.servlet.http.HttpServletRequest webModuleServletRequest)
{
this.setRequest(webModuleServletRequest);
}
public PortletWindow getInternalPortletWindow()
{
return portletWindow;
}
public void setIncluded(boolean included)
{
this.included = included;
}
public boolean isIncluded()
{
return included;
}
// --------------------------------------------------------------------------------------------
// internal methods ---------------------------------------------------------------------------
private javax.servlet.http.HttpServletRequest _getHttpServletRequest()
{
return(javax.servlet.http.HttpServletRequest)super.getRequest();
}
/**
* Is this attribute name a reserved name (by the J2EE spec)?
* Reserved names begin with "java." or "javax.".
*/
private boolean isNameReserved(String name)
{
return name.startsWith("java.") || name.startsWith("javax.");
}
// --------------------------------------------------------------------------------------------
// additional methods
// javax.servlet.http.HttpServletRequestWrapper
public java.lang.String getCharacterEncoding()
{
return this._getHttpServletRequest().getCharacterEncoding();
}
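// For included requests, the content type and length of the surrounding request are hidden from the portlet.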
public java.lang.String getContentType()
{
if (included)
{
return null;
}
else
{
return this._getHttpServletRequest().getContentType();
}
}
public int getContentLength()
{
if (included)
{
return 0;
}
else
{
return _getHttpServletRequest().getContentLength();
}
}
public BufferedReader getReader() throws java.io.UnsupportedEncodingException,java.io.IOException
{
if (included)
{
return null;
}
else
{
// the super class will ensure that an IllegalStateException is thrown if getInputStream() was called earlier
BufferedReader reader = _getHttpServletRequest().getReader();
bodyAccessed = true;
return reader;
}
}
public Cookie[] getCookies()
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getCookies();
}
public long getDateHeader(String name)
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getDateHeader(name);
}
public String getHeader(String name)
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getHeader(name);
}
public Enumeration getHeaders(String name)
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getHeaders(name);
}
public Enumeration getHeaderNames()
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getHeaderNames();
}
public int getIntHeader(String name)
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getIntHeader(name);
}
public String getPathInfo()
{
String attr = (String)super.getAttribute("javax.servlet.include.path_info");
return(attr != null) ? attr
: super.getPathInfo();
}
public String getQueryString()
{
String attr = (String)super.getAttribute("javax.servlet.include.query_string");
return(attr != null) ? attr
: super.getQueryString();
}
public String getPathTranslated()
{
return null;
}
public String getRequestURI()
{
String attr = (String)super.getAttribute("javax.servlet.include.request_uri");
return(attr != null) ? attr
: super.getRequestURI();
}
public StringBuffer getRequestURL()
{
return null;
}
public String getServletPath()
{
String attr = (String)super.getAttribute("javax.servlet.include.servlet_path");
return(attr != null) ? attr
: super.getServletPath();
}
public HttpSession getSession(boolean create)
{
return this._getHttpServletRequest().getSession(true);
}
public HttpSession getSession()
{
return this._getHttpServletRequest().getSession();
}
public String getMethod()
{
// TBD
return this._getHttpServletRequest().getMethod();
}
public boolean isRequestedSessionIdFromURL()
{
// TBD
return this._getHttpServletRequest().isRequestedSessionIdFromURL();
}
public boolean isRequestedSessionIdFromUrl()
{
return this._getHttpServletRequest().isRequestedSessionIdFromUrl();
}
public boolean isRequestedSessionIdFromCookie()
{
return this._getHttpServletRequest().isRequestedSessionIdFromCookie();
}
public String getProtocol()
{
return null;
}
public String getRemoteAddr()
{
return null;
}
public String getRemoteHost()
{
return null;
}
public String getRealPath(String path)
{
return null;
}
public void setCharacterEncoding(String env) throws java.io.UnsupportedEncodingException
{
if (bodyAccessed)
{
throw new IllegalStateException("This method must not be called after the HTTP-Body was accessed !");
}
this._getHttpServletRequest().setCharacterEncoding(env);
return;
}
public javax.servlet.ServletInputStream getInputStream() throws java.io.IOException
{
if (included)
{
return null;
}
else
{
// the super class will ensure that an IllegalStateException is thrown if getReader() was called earlier
javax.servlet.ServletInputStream stream = _getHttpServletRequest().getInputStream();
bodyAccessed = true;
return stream;
}
}
public javax.servlet.RequestDispatcher getRequestDispatcher(String path)
{
return this._getHttpServletRequest().getRequestDispatcher(path);
}
}
| src/container/org/apache/pluto/core/impl/PortletRequestImpl.java | /*
* The Apache Software License, Version 1.1
*
* Copyright (c) 2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Pluto", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact [email protected].
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
/*
*/
package org.apache.pluto.core.impl;
import java.io.BufferedReader;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import javax.portlet.PortalContext;
import javax.portlet.PortletMode;
import javax.portlet.PortletPreferences;
import javax.portlet.PortletRequest;
import javax.portlet.PortletSession;
import javax.portlet.WindowState;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpSession;
import org.apache.pluto.core.InternalPortletRequest;
import org.apache.pluto.factory.PortletObjectAccess;
import org.apache.pluto.om.window.PortletWindow;
import org.apache.pluto.services.information.DynamicInformationProvider;
import org.apache.pluto.services.information.InformationProviderAccess;
import org.apache.pluto.services.property.PropertyManager;
import org.apache.pluto.util.Enumerator;
import org.apache.pluto.util.NamespaceMapperAccess;
public abstract class PortletRequestImpl extends javax.servlet.http.HttpServletRequestWrapper
implements PortletRequest, InternalPortletRequest
{
private PortletWindow portletWindow;
/**
* Holds the portlet session
*/
private PortletSession portletSession;
private DynamicInformationProvider provider;
/**
* true if the HTTP-Body has been accessed
*/
private boolean bodyAccessed;
/**
* true if we are in an include call
*/
private boolean included;
public PortletRequestImpl(PortletWindow portletWindow,
javax.servlet.http.HttpServletRequest servletRequest)
{
super(servletRequest);
this.portletWindow = portletWindow;
provider = InformationProviderAccess.getDynamicProvider(_getHttpServletRequest());
}
// javax.portlet.PortletRequest implementation ------------------------------------------------
public boolean isWindowStateAllowed(WindowState state)
{
return provider.isWindowStateAllowed(state);
}
public boolean isPortletModeAllowed(PortletMode portletMode)
{
// check if portal supports portlet mode
boolean supported = provider.isPortletModeAllowed(portletMode);
// check if portlet supports portlet mode as well
if (supported)
{
supported = PortletModeHelper.isPortletModeAllowedByPortlet(portletWindow,
this._getHttpServletRequest().getContentType(),
portletMode);
}
return supported;
}
public PortletMode getPortletMode()
{
return provider.getPortletMode(portletWindow);
}
public WindowState getWindowState()
{
return provider.getWindowState(portletWindow);
}
// needs to be implemented in each subclass
public abstract PortletPreferences getPreferences();
public PortletSession getPortletSession()
{
return getPortletSession(true);
}
public PortletSession getPortletSession(boolean create)
{
// check if the session was invalidated
javax.servlet.http.HttpSession httpSession = this._getHttpServletRequest().getSession(false);
if ((portletSession != null) && (httpSession == null))
{
portletSession = null;
}
else if (httpSession != null)
{
create = true;
}
if (create && portletSession == null)
{
httpSession = this._getHttpServletRequest().getSession(create);
if (httpSession != null)
{
portletSession = PortletObjectAccess.getPortletSession(portletWindow, httpSession);
}
}
return portletSession;
}
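// getPortletSession() lazily wraps the underlying HttpSession and drops the wrapper once the
// HTTP session has been invalidated. A minimal usage sketch (names are illustrative only):
//
//   PortletSession ps = portletRequest.getPortletSession(false); // null when no HTTP session exists
//   if (ps == null) {
//       ps = portletRequest.getPortletSession();                 // forces creation
//   }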
public String getProperty(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Attribute name == null");
}
// get properties from request header
String prop = this._getHttpServletRequest().getHeader(name);
if (prop == null)
{
// get properties from PropertyManager
Map map = PropertyManager.getRequestProperties(portletWindow, this._getHttpServletRequest());
if (map != null)
{
String[] properties = (String[]) map.get(name);
if ((properties != null) && (properties.length > 0))
{
prop = properties[0];
}
}
}
return prop;
}
public Enumeration getProperties(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Property name == null");
}
Set v = new HashSet();
// get properties from request header
Enumeration props = this._getHttpServletRequest().getHeaders(name);
if (props != null)
{
while (props.hasMoreElements())
{
v.add(props.nextElement());
}
}
// get properties from PropertyManager
Map map = PropertyManager.getRequestProperties(portletWindow, this._getHttpServletRequest());
if (map != null)
{
String[] properties = (String[]) map.get(name);
if (properties != null)
{
// add properties to vector
for (int i=0;i<properties.length;i++)
{
v.add(properties[i]);
}
}
}
return new Enumerator(v.iterator());
}
public Enumeration getPropertyNames()
{
Set v = new HashSet();
// get properties from PropertyManager
Map map = PropertyManager.getRequestProperties(portletWindow, this._getHttpServletRequest());
if (map != null)
{
Iterator propsIter = map.keySet().iterator();
while (propsIter.hasNext())
{
v.add(propsIter.next());
}
}
// get properties from request header
Enumeration props = this._getHttpServletRequest().getHeaderNames();
if (props != null)
{
while (props.hasMoreElements())
{
v.add(props.nextElement());
}
}
return new Enumerator(v.iterator());
}
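// The property view exposed by getProperty()/getProperties()/getPropertyNames() is the union of
// the HTTP request headers and any values supplied by the portal's PropertyManager; for a single
// value, the request header wins. A minimal sketch, assuming an "Accept-Language" header is
// present on the request:
//
//   String lang = portletRequest.getProperty("Accept-Language"); // header value
//   Enumeration names = portletRequest.getPropertyNames();       // headers + portal properties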
public PortalContext getPortalContext()
{
return PortletObjectAccess.getPortalContext();
}
public String getAuthType()
{
return this._getHttpServletRequest().getAuthType();
}
public String getContextPath()
{
return portletWindow.getPortletEntity().getPortletDefinition().getPortletApplicationDefinition().getWebApplicationDefinition().getContextRoot();
// we cannot use that because of a bug in tomcat
// return this._getHttpServletRequest().getContextPath();
}
public String getRemoteUser()
{
return this._getHttpServletRequest().getRemoteUser();
}
public java.security.Principal getUserPrincipal()
{
return this._getHttpServletRequest().getUserPrincipal();
}
public boolean isUserInRole(String role)
{
return this._getHttpServletRequest().isUserInRole(role);
}
public Object getAttribute(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Attribute name == null");
}
Object attribute = this._getHttpServletRequest().getAttribute(
NamespaceMapperAccess.getNamespaceMapper().encode(portletWindow.getId(),name)
);
if (attribute==null)
{
// TBD: not sure if this should be done for all attributes or only the javax.servlet.* ones
attribute = this._getHttpServletRequest().getAttribute(name);
}
return attribute;
}
public Enumeration getAttributeNames()
{
Enumeration attributes = this._getHttpServletRequest().getAttributeNames();
Vector portletAttributes = new Vector();
while (attributes.hasMoreElements())
{
String attribute = (String)attributes.nextElement();
String portletAttribute = NamespaceMapperAccess.getNamespaceMapper().decode(portletWindow.getId(),attribute);
if (portletAttribute!=null)
{ // it is in the portlet's namespace
portletAttributes.add(portletAttribute);
}
}
return portletAttributes.elements();
}
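// Request attributes are namespaced per portlet window through the NamespaceMapper, so two
// windows of the same portlet do not see each other's attributes. A minimal sketch, assuming a
// window id of "window1" (the encoded form depends on the NamespaceMapper implementation):
//
//   portletRequest.setAttribute("cart", cart);         // stored under encode("window1", "cart")
//   Object same = portletRequest.getAttribute("cart");  // decoded transparently on the way back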
public String getParameter(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Parameter name == null");
}
bodyAccessed = true;
Map parameters = this._getHttpServletRequest().getParameterMap();
String[] values = (String[])parameters.get(name);
if (values != null)
{
return values[0];
}
return null;
}
public java.util.Enumeration getParameterNames()
{
bodyAccessed = true;
Map parameters = this._getHttpServletRequest().getParameterMap();
return Collections.enumeration(parameters.keySet());
}
public String[] getParameterValues(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Parameter name == null");
}
bodyAccessed = true;
Map parameters = this._getHttpServletRequest().getParameterMap();
return(String[])parameters.get(name);
}
public Map getParameterMap()
{
bodyAccessed = true;
return Collections.unmodifiableMap(this._getHttpServletRequest().getParameterMap());
}
public boolean isSecure()
{
return this._getHttpServletRequest().isSecure();
}
public void setAttribute(String name, Object o)
{
if (name == null)
{
throw new IllegalArgumentException("Attribute name == null");
}
if ( o == null)
{
this.removeAttribute(name);
}
else if (isNameReserved(name))
{
// Reserved names go directly in the underlying request
_getHttpServletRequest().setAttribute(name, o);
}
else
{
this._getHttpServletRequest().setAttribute(NamespaceMapperAccess.getNamespaceMapper().encode(portletWindow.getId(),name), o);
}
}
public void removeAttribute(String name)
{
if (name == null)
{
throw new IllegalArgumentException("Attribute name == null");
}
this._getHttpServletRequest().removeAttribute(
NamespaceMapperAccess.getNamespaceMapper().encode(portletWindow.getId(), name)
);
}
public String getRequestedSessionId()
{
return this._getHttpServletRequest().getRequestedSessionId();
}
public boolean isRequestedSessionIdValid()
{
return this._getHttpServletRequest().isRequestedSessionIdValid();
}
public String getResponseContentType()
{
// get the default response content type from the container
String responseContentType = provider.getResponseContentType();
return responseContentType;
}
public Enumeration getResponseContentTypes()
{
// get the default response content types from the container
Iterator responseContentTypes = provider.getResponseContentTypes();
return new Enumerator(responseContentTypes);
}
public java.util.Locale getLocale()
{
return this._getHttpServletRequest().getLocale();
}
public Enumeration getLocales()
{
return this._getHttpServletRequest().getLocales();
}
public String getScheme()
{
return this._getHttpServletRequest().getScheme();
}
public String getServerName()
{
return this._getHttpServletRequest().getServerName();
}
public int getServerPort()
{
return this._getHttpServletRequest().getServerPort();
}
// --------------------------------------------------------------------------------------------
// org.apache.pluto.core.InternalPortletRequest implementation --------------------------------
public void lateInit(javax.servlet.http.HttpServletRequest webModuleServletRequest)
{
this.setRequest(webModuleServletRequest);
}
public PortletWindow getInternalPortletWindow()
{
return portletWindow;
}
public void setIncluded(boolean included)
{
this.included = included;
}
public boolean isIncluded()
{
return included;
}
// --------------------------------------------------------------------------------------------
// internal methods ---------------------------------------------------------------------------
private javax.servlet.http.HttpServletRequest _getHttpServletRequest()
{
return(javax.servlet.http.HttpServletRequest)super.getRequest();
}
/**
* Is this attribute name a reserved name (by the J2EE spec)?
* Reserved names begin with "java." or "javax.".
*/
private boolean isNameReserved(String name)
{
return name.startsWith("java.") || name.startsWith("javax.");
}
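// For example, isNameReserved("javax.servlet.include.request_uri") and isNameReserved("java.home")
// both return true, so setAttribute() stores such names directly on the underlying request instead
// of encoding them into the portlet window's namespace.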
// --------------------------------------------------------------------------------------------
// additional methods
// javax.servlet.http.HttpServletRequestWrapper
public java.lang.String getCharacterEncoding()
{
return this._getHttpServletRequest().getCharacterEncoding();
}
public java.lang.String getContentType()
{
if (included)
{
return null;
}
else
{
return this._getHttpServletRequest().getContentType();
}
}
public int getContentLength()
{
if (included)
{
return 0;
}
else
{
return _getHttpServletRequest().getContentLength();
}
}
public BufferedReader getReader() throws java.io.UnsupportedEncodingException,java.io.IOException
{
if (included)
{
return null;
}
else
{
// the super class will ensure that an IllegalStateException is thrown if getInputStream() was called earlier
BufferedReader reader = _getHttpServletRequest().getReader();
bodyAccessed = true;
return reader;
}
}
public Cookie[] getCookies()
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getCookies();
}
public long getDateHeader(String name)
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getDateHeader(name);
}
public String getHeader(String name)
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getHeader(name);
}
public Enumeration getHeaders(String name)
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getHeaders(name);
}
public Enumeration getHeaderNames()
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getHeaderNames();
}
public int getIntHeader(String name)
{
//must be based on the PortletRequest property [!]
return this._getHttpServletRequest().getIntHeader(name);
}
public String getPathInfo()
{
String attr = (String)super.getAttribute("javax.servlet.include.path_info");
return(attr != null) ? attr
: super.getPathInfo();
}
public String getQueryString()
{
String attr = (String)super.getAttribute("javax.servlet.include.query_string");
return(attr != null) ? attr
: super.getQueryString();
}
public String getPathTranslated()
{
return null;
}
public String getRequestURI()
{
String attr = (String)super.getAttribute("javax.servlet.include.request_uri");
return(attr != null) ? attr
: super.getRequestURI();
}
public StringBuffer getRequestURL()
{
return null;
}
public String getServletPath()
{
String attr = (String)super.getAttribute("javax.servlet.include.servlet_path");
return(attr != null) ? attr
: super.getServletPath();
}
public HttpSession getSession(boolean create)
{
return this._getHttpServletRequest().getSession(true);
}
public HttpSession getSession()
{
return this._getHttpServletRequest().getSession();
}
public String getMethod()
{
// TBD
return this._getHttpServletRequest().getMethod();
}
public boolean isRequestedSessionIdFromURL()
{
// TBD
return this._getHttpServletRequest().isRequestedSessionIdFromURL();
}
public boolean isRequestedSessionIdFromUrl()
{
return this._getHttpServletRequest().isRequestedSessionIdFromUrl();
}
public boolean isRequestedSessionIdFromCookie()
{
return this._getHttpServletRequest().isRequestedSessionIdFromCookie();
}
public String getProtocol()
{
return null;
}
public String getRemoteAddr()
{
return null;
}
public String getRemoteHost()
{
return null;
}
public String getRealPath(String path)
{
return null;
}
public void setCharacterEncoding(String env) throws java.io.UnsupportedEncodingException
{
if (bodyAccessed)
{
throw new IllegalStateException("This method must not be called after the HTTP-Body was accessed !");
}
this._getHttpServletRequest().setCharacterEncoding(env);
return;
}
public javax.servlet.ServletInputStream getInputStream() throws java.io.IOException
{
if (included)
{
return null;
}
else
{
// the super class will ensure that an IllegalStateException is thrown if getReader() was called earlier
javax.servlet.ServletInputStream stream = _getHttpServletRequest().getInputStream();
bodyAccessed = true;
return stream;
}
}
public javax.servlet.RequestDispatcher getRequestDispatcher(String path)
{
return this._getHttpServletRequest().getRequestDispatcher(path);
}
}
| fixed removeAttribute method to distinguish between different scopes. Fix was provided by Steven Parkes.
git-svn-id: d98179c662bf49ad5b85b094e2cd8f1aa3529901@35791 13f79535-47bb-0310-9956-ffa450edef68
| src/container/org/apache/pluto/core/impl/PortletRequestImpl.java | fixed removeAttribute method to distinguish between different scopes. Fix was provided by Steven Parkes. |
|
Java | apache-2.0 | be30a95655e82c17e8640f42e0995ddf1e022198 | 0 | cogfor/mcf-cogfor,gladyscarrizales/manifoldcf,gladyscarrizales/manifoldcf,cogfor/mcf-cogfor,cogfor/mcf-cogfor,cogfor/mcf-cogfor,gladyscarrizales/manifoldcf,gladyscarrizales/manifoldcf,gladyscarrizales/manifoldcf,cogfor/mcf-cogfor,gladyscarrizales/manifoldcf,cogfor/mcf-cogfor | /**
* Copyright 2014 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.manifoldcf.crawler.connectors.gridfs;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.DBTCPConnector;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.InputStream;
import java.net.UnknownHostException;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.manifoldcf.agents.interfaces.RepositoryDocument;
import org.apache.manifoldcf.agents.interfaces.ServiceInterruption;
import org.apache.manifoldcf.core.interfaces.ConfigParams;
import org.apache.manifoldcf.core.interfaces.Specification;
import org.apache.manifoldcf.core.interfaces.IHTTPOutput;
import org.apache.manifoldcf.core.interfaces.IPasswordMapperActivity;
import org.apache.manifoldcf.core.interfaces.IPostParameters;
import org.apache.manifoldcf.core.interfaces.IThreadContext;
import org.apache.manifoldcf.core.interfaces.ManifoldCFException;
import org.apache.manifoldcf.crawler.connectors.BaseRepositoryConnector;
import org.apache.manifoldcf.crawler.interfaces.IProcessActivity;
import org.apache.manifoldcf.crawler.interfaces.ISeedingActivity;
import org.apache.manifoldcf.crawler.interfaces.IExistingVersions;
import org.apache.manifoldcf.crawler.system.Logging;
import org.bson.types.ObjectId;
/**
*
* @author molgun
*/
public class GridFSRepositoryConnector extends BaseRepositoryConnector {
/**
* Activity name for the activity record.
*/
protected static final String ACTIVITY_FETCH = "fetch";
/**
* Server name for declaring bin name.
*/
protected static final String SERVER = "MongoDB - GridFS";
/**
* Session expiration milliseconds.
*/
protected static final long SESSION_EXPIRATION_MILLISECONDS = 30000L;
/**
* Endpoint username.
*/
protected String username = null;
/**
* Endpoint password.
*/
protected String password = null;
/**
* Endpoint host.
*/
protected String host = null;
/**
* Endpoint port.
*/
protected String port = null;
/**
* Endpoint db.
*/
protected String db = null;
/**
* Endpoint bucket.
*/
protected String bucket = null;
/**
* Endpoint url.
*/
protected String url = null;
/**
* Endpoint acl.
*/
protected String acl = null;
/**
* Endpoint denyAcl.
*/
protected String denyAcl = null;
/**
* MongoDB session.
*/
protected DB session = null;
/**
* Last session fetch time.
*/
protected long lastSessionFetch = -1L;
/**
* Forward to the javascript to check the configuration parameters.
*/
private static final String EDIT_CONFIG_HEADER_FORWARD = "editConfiguration.js";
/**
* Forward to the HTML template to view the configuration parameters.
*/
private static final String VIEW_CONFIG_FORWARD = "viewConfiguration.html";
/**
* Forward to the HTML template to edit the configuration parameters.
*/
private static final String EDIT_CONFIG_FORWARD_SERVER = "editConfiguration_Server.html";
/**
* GridFS server tab name.
*/
private static final String GRIDFS_SERVER_TAB_RESOURCE = "GridFSConnector.Server";
/**
* GridFS credentials tab name.
*/
private static final String GRIDFS_CREDENTIALS_TAB_RESOURCE = "GridFSConnector.Credentials";
/**
* Tab name parameter for managing the view of the Web UI.
*/
private static final String TAB_NAME_PARAM = "TabName";
/**
* Constructor.
*/
public GridFSRepositoryConnector() {
super();
}
/**
* Get the bin names for a document identifier. The bin name describes the queue
* to which the document will be assigned for fetch-rate throttling purposes; this
* connector places every document in a single server-wide bin.
*
* @param documentIdentifier is the document identifier.
* @return the set of bin names.
*/
@Override
public String[] getBinNames(String documentIdentifier) {
return new String[]{SERVER};
}
/**
* Tell the world what model this connector uses for addSeedDocuments().
* This must return a model value as specified above. The connector does not
* have to be connected for this method to be called.
*
* @return the model type value.
*/
@Override
public int getConnectorModel() {
return super.getConnectorModel();
}
/**
* Return the list of activities that this connector supports (i.e. writes
* into the log). The connector does not have to be connected for this
* method to be called.
*
* @return the list.
*/
@Override
public String[] getActivitiesList() {
return new String[]{ACTIVITY_FETCH};
}
/**
* Connect.
*
* @param configParams is the set of configuration parameters, which in this
* case describe the MongoDB endpoint, bucket, and metadata field names.
*/
@Override
public void connect(ConfigParams configParams) {
super.connect(configParams);
username = params.getParameter(GridFSConstants.USERNAME_PARAM);
password = params.getParameter(GridFSConstants.PASSWORD_PARAM);
host = params.getParameter(GridFSConstants.HOST_PARAM);
port = params.getParameter(GridFSConstants.PORT_PARAM);
db = params.getParameter(GridFSConstants.DB_PARAM);
bucket = params.getParameter(GridFSConstants.BUCKET_PARAM);
url = params.getParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM);
acl = params.getParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM);
denyAcl = params.getParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM);
}
/**
* Test the connection. Returns a string describing the connection
* integrity.
*
* @return the connection's status as a displayable string.
* @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
*/
@Override
public String check() throws ManifoldCFException {
try {
getSession();
if (session != null) {
Mongo currentMongoSession = session.getMongo();
DBTCPConnector currentTCPConnection = currentMongoSession.getConnector();
boolean status = currentTCPConnection.isOpen();
if (status) {
session.getMongo().close();
session = null;
return super.check();
} else {
session = null;
}
}
return "Not connected.";
} catch (ManifoldCFException e) {
return e.getMessage();
}
}
/**
* Close the connection. Call this before discarding this instance of the
* repository connector.
*
* @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
*/
@Override
public void disconnect() throws ManifoldCFException {
if (session != null) {
try {
session.getMongo().getConnector().close();
} catch (Exception e) {
Logging.connectors.error("GridFS: Error when trying to disconnect: " + e.getMessage());
throw new ManifoldCFException("GridFS: Error when trying to disconnect: " + e.getMessage(), e);
}
session = null;
lastSessionFetch = -1L;
username = null;
password = null;
host = null;
port = null;
db = null;
bucket = null;
url = null;
acl = null;
denyAcl = null;
}
}
/**
* This method is periodically called for all connectors that are connected
* but not in active use.
*
* @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
*/
@Override
public void poll() throws ManifoldCFException {
if (lastSessionFetch == -1L) {
return;
}
long currentTime = System.currentTimeMillis();
if (currentTime >= lastSessionFetch + SESSION_EXPIRATION_MILLISECONDS) {
if (session != null) {
session.getMongo().getConnector().close();
session = null;
}
lastSessionFetch = -1L;
}
}
/**
* This method is called to assess whether this connector instance should
* actually be counted as being connected.
*
* @return true if the connector instance is actually connected.
*/
@Override
public boolean isConnected() {
if (session == null) {
return false;
}
Mongo currentMongoSession = session.getMongo();
DBTCPConnector currentTCPConnection = currentMongoSession.getConnector();
return currentTCPConnection.isOpen();
}
/**
* Get the maximum number of documents to amalgamate together into one
* batch, for this connector.
*
* @return the maximum number. 0 indicates "unlimited".
*/
@Override
public int getMaxDocumentRequest() {
return super.getMaxDocumentRequest();
}
/**
* Return the list of relationship types that this connector recognizes.
*
* @return the list.
*/
@Override
public String[] getRelationshipTypes() {
return super.getRelationshipTypes();
}
/** Queue "seed" documents. Seed documents are the starting places for crawling activity. Documents
* are seeded when this method calls appropriate methods in the passed in ISeedingActivity object.
*
* This method can choose to find repository changes that happen only during the specified time interval.
* The seeds recorded by this method will be viewed by the framework based on what the
* getConnectorModel() method returns.
*
* It is not a big problem if the connector chooses to create more seeds than are
* strictly necessary; it is merely a question of overall work required.
*
* The end time and seeding version string passed to this method may be interpreted for greatest efficiency.
* For continuous crawling jobs, this method will
* be called once, when the job starts, and at various periodic intervals as the job executes.
*
* When a job's specification is changed, the framework automatically resets the seeding version string to null. The
* seeding version string may also be set to null on each job run, depending on the connector model returned by
* getConnectorModel().
*
* Note that it is always ok to send MORE documents rather than less to this method.
* The connector will be connected before this method can be called.
*@param activities is the interface this method should use to perform whatever framework actions are desired.
*@param spec is a document specification (that comes from the job).
*@param seedTime is the end of the time range of documents to consider, exclusive.
*@param lastSeedVersion is the last seeding version string for this job, or null if the job has no previous seeding version string.
*@param jobMode is an integer describing how the job is being run, whether continuous or once-only.
*@return an updated seeding version string, to be stored with the job.
*/
@Override
public String addSeedDocuments(ISeedingActivity activities, Specification spec,
String lastSeedVersion, long seedTime, int jobMode)
throws ManifoldCFException, ServiceInterruption {
getSession();
DBCollection fsFiles = session.getCollection(
bucket + GridFSConstants.COLLECTION_SEPERATOR + GridFSConstants.FILES_COLLECTION_NAME
);
DBCursor dnc = fsFiles.find();
while (dnc.hasNext()) {
DBObject dbo = dnc.next();
String _id = dbo.get("_id").toString();
activities.addSeedDocument(_id);
if (Logging.connectors.isDebugEnabled()) {
Logging.connectors.debug("GridFS: Document _id = " + _id + " added to queue");
}
}
return "";
}
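// This implementation re-seeds every file in the bucket on each pass. Because fs.files documents
// carry an "uploadDate" field, seeding could instead be bounded by seedTime; a minimal sketch,
// assuming the default GridFS schema (not what this connector currently does):
//
//   DBCursor bounded = fsFiles.find(
//       new com.mongodb.BasicDBObject("uploadDate",
//           new com.mongodb.BasicDBObject("$lt", new Date(seedTime))));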
/** Process a set of documents.
* This is the method that should cause each document to be fetched, processed, and the results either added
* to the queue of documents for the current job, and/or entered into the incremental ingestion manager.
* The document specification allows this class to filter what is done based on the job.
* The connector will be connected before this method can be called.
*@param documentIdentifiers is the set of document identifiers to process.
*@param statuses are the currently-stored document versions for each document in the set of document identifiers
* passed in above.
*@param activities is the interface this method should use to queue up new document references
* and ingest documents.
*@param jobMode is an integer describing how the job is being run, whether continuous or once-only.
*@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one.
*/
@Override
public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec,
IProcessActivity activities, int jobMode, boolean usesDefaultAuthority)
throws ManifoldCFException, ServiceInterruption {
for (String documentIdentifier : documentIdentifiers) {
String versionString;
GridFS gfs;
GridFSDBFile document;
getSession();
String _id = documentIdentifier;
gfs = new GridFS(session, bucket);
document = gfs.findOne(new ObjectId(_id));
if (document == null) {
activities.deleteDocument(documentIdentifier);
continue;
} else {
DBObject metadata = document.getMetaData();
versionString = document.getMD5() + "+" + (metadata != null
        ? Integer.toString(metadata.hashCode())
        : StringUtils.EMPTY);
}
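// The version string is the file's MD5 plus a hash of its metadata ("<md5>+<metadataHashCode>"),
// so either a content change or a metadata change causes the document to be reindexed.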
if (versionString.length() == 0 || activities.checkDocumentNeedsReindexing(documentIdentifier,versionString)) {
long startTime = System.currentTimeMillis();
String errorCode = "OK";
String errorDesc = null;
String version = versionString;
if (Logging.connectors.isDebugEnabled()) {
Logging.connectors.debug("GridFS: Processing document _id = " + _id);
}
DBObject metadata = document.getMetaData();
if (metadata == null) {
Logging.connectors.warn("GridFS: Document " + _id + " has a null metadata - skipping.");
activities.noDocument(_id,version);
continue;
}
String urlValue = document.getMetaData().get(this.url) == null
? StringUtils.EMPTY
: document.getMetaData().get(this.url).toString();
if (!StringUtils.isEmpty(urlValue)) {
boolean validURL;
try {
new java.net.URI(urlValue);
validURL = true;
} catch (java.net.URISyntaxException e) {
validURL = false;
}
if (validURL) {
long fileLenght = document.getLength();
Date createdDate = document.getUploadDate();
String fileName = document.getFilename();
String mimeType = document.getContentType();
if (!activities.checkURLIndexable(urlValue))
{
Logging.connectors.warn("GridFS: Document " + _id + " has a URL excluded by the output connector ('" + urlValue + "') - skipping.");
activities.noDocument(_id, version);
continue;
}
if (!activities.checkLengthIndexable(fileLenght))
{
Logging.connectors.warn("GridFS: Document " + _id + " has a length excluded by the output connector (" + fileLenght + ") - skipping.");
activities.noDocument(_id, version);
continue;
}
if (!activities.checkMimeTypeIndexable(mimeType))
{
Logging.connectors.warn("GridFS: Document " + _id + " has a mime type excluded by the output connector ('" + mimeType + "') - skipping.");
activities.noDocument(_id, version);
continue;
}
if (!activities.checkDateIndexable(createdDate))
{
Logging.connectors.warn("GridFS: Document " + _id + " has a date excluded by the output connector (" + createdDate + ") - skipping.");
activities.noDocument(_id, version);
continue;
}
RepositoryDocument rd = new RepositoryDocument();
rd.setCreatedDate(createdDate);
rd.setModifiedDate(createdDate);
rd.setFileName(fileName);
rd.setMimeType(mimeType);
String[] aclsArray = null;
String[] denyAclsArray = null;
if (acl != null) {
try {
Object aclObject = document.getMetaData().get(acl);
if (aclObject != null) {
List<String> acls = (List<String>) aclObject;
aclsArray = acls.toArray(new String[acls.size()]);
}
} catch (ClassCastException e) {
// This is bad because security will fail
Logging.connectors.warn("GridFS: Document " + _id + " metadata ACL field doesn't contain List<String> type.");
throw new ManifoldCFException("Security decoding error: "+e.getMessage(),e);
}
}
if (denyAcl != null) {
try {
Object denyAclObject = document.getMetaData().get(denyAcl);
if (denyAclObject != null) {
List<String> denyAcls = (List<String>) denyAclObject;
denyAcls.add(GLOBAL_DENY_TOKEN);
denyAclsArray = denyAcls.toArray(new String[denyAcls.size()]);
}
} catch (ClassCastException e) {
// This is bad because security will fail
Logging.connectors.warn("GridFS: Document " + _id + " metadata DenyACL field doesn't contain List<String> type.");
throw new ManifoldCFException("Security decoding error: "+e.getMessage(),e);
}
}
rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT,aclsArray,denyAclsArray);
InputStream is = document.getInputStream();
try {
rd.setBinary(is, fileLenght);
try {
activities.ingestDocumentWithException(_id, version, urlValue, rd);
} catch (IOException e) {
handleIOException(e);
}
} finally {
try {
is.close();
} catch (IOException e) {
handleIOException(e);
}
}
gfs.getDB().getMongo().getConnector().close();
session = null;
activities.recordActivity(startTime, ACTIVITY_FETCH,
fileLenght, _id, errorCode, errorDesc, null);
} else {
Logging.connectors.warn("GridFS: Document " + _id + " has a invalid URL: " + urlValue + " - skipping.");
activities.noDocument(_id,version);
}
} else {
Logging.connectors.warn("GridFS: Document " + _id + " has a null URL - skipping.");
activities.noDocument(_id,version);
}
}
}
}
protected static void handleIOException(IOException e) throws ManifoldCFException, ServiceInterruption {
if (e instanceof InterruptedIOException) {
throw new ManifoldCFException(e.getMessage(), e, ManifoldCFException.INTERRUPTED);
} else {
throw new ManifoldCFException(e.getMessage(), e);
}
}
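// handleIOException() always aborts the job with a ManifoldCFException. Transient I/O failures
// could instead be reported as a ServiceInterruption so the framework retries the document later.
// A minimal sketch, assuming a 5-minute retry window (constructor arguments shown are indicative;
// check the ServiceInterruption API for the exact signature):
//
//   long now = System.currentTimeMillis();
//   throw new ServiceInterruption("GridFS I/O error: " + e.getMessage(), e,
//       now + 300000L, now + 3L * 60L * 60000L, -1, false);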
/**
* Output the configuration header section. This method is called in the
* head section of the connector's configuration page. Its purpose is to add
* the required tabs to the list, and to output any javascript methods that
* might be needed by the configuration editing HTML. The connector does not
* need to be connected for this method to be called.
*
* @param threadContext is the local thread context.
* @param out is the output to which any HTML should be sent.
* @param parameters are the configuration parameters, as they currently
* exist, for this connection being configured.
* @param tabsArray is an array of tab names. Add to this array any tab
* names that are specific to the connector.
* @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
* @throws java.io.IOException
*/
@Override
public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray) throws ManifoldCFException, IOException {
tabsArray.add(Messages.getString(locale, GRIDFS_SERVER_TAB_RESOURCE));
tabsArray.add(Messages.getString(locale, GRIDFS_CREDENTIALS_TAB_RESOURCE));
Map<String, String> paramMap = new HashMap<String, String>();
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIG_HEADER_FORWARD, paramMap, true);
}
/**
* Output the configuration body section. This method is called in the body
* section of the connector's configuration page. Its purpose is to present
* the required form elements for editing. The coder can presume that the
* HTML that is output from this configuration will be within appropriate
* <html>, <body>, and <form> tags. The name of the form is always
* "editconnection". The connector does not need to be connected for this
* method to be called.
*
* @param threadContext is the local thread context.
* @param out is the output to which any HTML should be sent.
* @param parameters are the configuration parameters, as they currently
* exist, for this connection being configured.
* @param tabName is the current tab name.
*/
@Override
public void outputConfigurationBody(IThreadContext threadContext,
IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException {
Map<String, String> paramMap = new HashMap<String, String>();
paramMap.put(TAB_NAME_PARAM, tabName);
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIG_FORWARD_SERVER, paramMap, true);
}
/**
* Process a configuration post. This method is called at the start of the
* connector's configuration page, whenever there is a possibility that form
* data for a connection has been posted. Its purpose is to gather form
* information and modify the configuration parameters accordingly. The name
* of the posted form is always "editconnection". The connector does not
* need to be connected for this method to be called.
*
* @param threadContext is the local thread context.
* @param variableContext is the set of variables available from the post,
* including binary file post information.
* @param parameters are the configuration parameters, as they currently
* exist, for this connection being configured.
* @return null if all is well, or a string error message if there is an
* error that should prevent saving of the connection (and cause a
* redirection to an error page).
*/
@Override
public String processConfigurationPost(IThreadContext threadContext,
IPostParameters variableContext, Locale locale, ConfigParams parameters)
throws ManifoldCFException {
String username = variableContext.getParameter(GridFSConstants.USERNAME_PARAM);
if (username != null) {
parameters.setParameter(GridFSConstants.USERNAME_PARAM, username);
}
String password = variableContext.getParameter(GridFSConstants.PASSWORD_PARAM);
if (password != null) {
parameters.setParameter(GridFSConstants.PASSWORD_PARAM, variableContext.mapKeyToPassword(password));
}
String db = variableContext.getParameter(GridFSConstants.DB_PARAM);
if (db != null) {
parameters.setParameter(GridFSConstants.DB_PARAM, db);
}
String bucket = variableContext.getParameter(GridFSConstants.BUCKET_PARAM);
if (bucket != null) {
parameters.setParameter(GridFSConstants.BUCKET_PARAM, bucket);
}
String port = variableContext.getParameter(GridFSConstants.PORT_PARAM);
if (port != null) {
parameters.setParameter(GridFSConstants.PORT_PARAM, port);
}
String host = variableContext.getParameter(GridFSConstants.HOST_PARAM);
if (host != null) {
parameters.setParameter(GridFSConstants.HOST_PARAM, host);
}
String url = variableContext.getParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM);
if (url != null) {
parameters.setParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM, url);
}
String acl = variableContext.getParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM);
if (acl != null) {
parameters.setParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM, acl);
}
String denyAcl = variableContext.getParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM);
if (denyAcl != null) {
parameters.setParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM, denyAcl);
}
return null;
}
/**
* View configuration. This method is called in the body section of the
* connector's view configuration page. Its purpose is to present the
* connection information to the user. The coder can presume that the HTML
* that is output from this configuration will be within appropriate <html>
* and <body> tags. The connector does not need to be connected for this
* method to be called.
*
* @param threadContext is the local thread context.
* @param out is the output to which any HTML should be sent.
* @param parameters are the configuration parameters, as they currently
* exist, for this connection being configured.
*/
@Override
public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException {
Map<String, String> paramMap = new HashMap<String, String>();
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, VIEW_CONFIG_FORWARD, paramMap, true);
}
/**
* Setup a session.
*
* @throws ManifoldCFException
*/
protected void getSession() throws ManifoldCFException {
if (session == null) {
if (StringUtils.isEmpty(db) || StringUtils.isEmpty(bucket)) {
throw new ManifoldCFException("GridFS: Database or bucket name cannot be empty.");
}
if (StringUtils.isEmpty(url)) {
throw new ManifoldCFException("GridFS: Metadata URL field cannot be empty.");
}
if (StringUtils.isEmpty(host) && StringUtils.isEmpty(port)) {
try {
session = new MongoClient().getDB(db);
} catch (UnknownHostException ex) {
throw new ManifoldCFException("GridFS: Default host is not found. Does mongod process run?" + ex.getMessage(), ex);
}
} else if (!StringUtils.isEmpty(host) && StringUtils.isEmpty(port)) {
try {
session = new MongoClient(host).getDB(db);
} catch (UnknownHostException ex) {
throw new ManifoldCFException("GridFS: Given host information is not valid or mongod process doesn't run" + ex.getMessage(), ex);
}
} else if (!StringUtils.isEmpty(host) && !StringUtils.isEmpty(port)) {
try {
int integerPort = Integer.parseInt(port);
session = new MongoClient(host, integerPort).getDB(db);
} catch (UnknownHostException ex) {
throw new ManifoldCFException("GridFS: Given information is not valid or mongod process doesn't run" + ex.getMessage(), ex);
} catch (NumberFormatException ex) {
throw new ManifoldCFException("GridFS: Given port is not valid number. " + ex.getMessage(), ex);
}
} else if (StringUtils.isEmpty(host) && !StringUtils.isEmpty(port)) {
try {
int integerPort = Integer.parseInt(port);
session = new MongoClient(host, integerPort).getDB(db);
} catch (UnknownHostException ex) {
throw new ManifoldCFException("GridFS: Given information is not valid or mongod process doesn't run" + ex.getMessage(), ex);
} catch (NumberFormatException ex) {
throw new ManifoldCFException("GridFS: Given port is not valid number. " + ex.getMessage(), ex);
}
}
if (!StringUtils.isEmpty(username) && !StringUtils.isEmpty(password)) {
boolean auth = session.authenticate(username, password.toCharArray());
if (!auth) {
throw new ManifoldCFException("GridFS: Given database username and password doesn't match.");
}
}
lastSessionFetch = System.currentTimeMillis();
}
}
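// The branches above map onto the mongo-java-driver constructors MongoClient(), MongoClient(host)
// and MongoClient(host, port). With driver versions that provide MongoClientURI, the same
// connection (including credentials) can be expressed as one connection string; a minimal sketch,
// assuming such a driver version and illustrative credentials:
//
//   DB connected = new MongoClient(
//       new com.mongodb.MongoClientURI("mongodb://user:secret@localhost:27017/manifoldcf"))
//       .getDB("manifoldcf");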
/**
* Fill in a Server tab configuration parameter map for calling a Velocity
* template.
*
* @param paramMap is the map to fill in
* @param parameters is the current set of configuration parameters
*/
public void fillInServerParameters(Map<String, String> paramMap, IPasswordMapperActivity mapper, ConfigParams parameters) {
String usernameParam = parameters.getParameter(GridFSConstants.USERNAME_PARAM);
paramMap.put(GridFSConstants.USERNAME_PARAM, usernameParam);
String passwordParam = parameters.getParameter(GridFSConstants.PASSWORD_PARAM);
passwordParam = mapper.mapKeyToPassword(passwordParam);
paramMap.put(GridFSConstants.PASSWORD_PARAM, passwordParam);
String dbParam = parameters.getParameter(GridFSConstants.DB_PARAM);
if (StringUtils.isEmpty(dbParam)) {
dbParam = GridFSConstants.DEFAULT_DB_NAME;
}
paramMap.put(GridFSConstants.DB_PARAM, dbParam);
String bucketParam = parameters.getParameter(GridFSConstants.BUCKET_PARAM);
if (StringUtils.isEmpty(bucketParam)) {
bucketParam = GridFSConstants.DEFAULT_BUCKET_NAME;
}
paramMap.put(GridFSConstants.BUCKET_PARAM, bucketParam);
String hostParam = parameters.getParameter(GridFSConstants.HOST_PARAM);
paramMap.put(GridFSConstants.HOST_PARAM, hostParam);
String portParam = parameters.getParameter(GridFSConstants.PORT_PARAM);
paramMap.put(GridFSConstants.PORT_PARAM, portParam);
String urlParam = parameters.getParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM);
paramMap.put(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM, urlParam);
String aclParam = parameters.getParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM);
paramMap.put(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM, aclParam);
String denyAclParam = parameters.getParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM);
paramMap.put(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM, denyAclParam);
}
/**
* Special column names, as far as document queries are concerned
*/
protected static HashMap documentKnownColumns;
static {
documentKnownColumns = new HashMap();
documentKnownColumns.put(GridFSConstants.DEFAULT_ID_FIELD_NAME, "");
documentKnownColumns.put(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM, "");
}
/**
* Apply metadata to a repository document.
*
* @param rd is the repository document to apply the metadata to.
* @param metadataMap is the resultset row to use to get the metadata. All
* non-special columns from this row will be considered to be metadata.
*/
protected void applyMetadata(RepositoryDocument rd, DBObject metadataMap)
throws ManifoldCFException {
// Cycle through the document's fields
Iterator iter = metadataMap.keySet().iterator();
while (iter.hasNext()) {
String fieldName = (String) iter.next();
if (documentKnownColumns.get(fieldName) == null) {
// Consider this field to contain metadata.
// We can only accept non-binary metadata at this time.
Object metadata = metadataMap.get(fieldName);
if (!(metadata instanceof String)) {
throw new ManifoldCFException("Metadata field '" + fieldName + "' must be convertible to a string.");
}
rd.addField(fieldName, metadata.toString());
}
}
}
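// Note that applyMetadata() is not invoked from processDocuments() in this version, so GridFS
// metadata fields are only consulted for the URL and ACLs. Wiring it in would be a single call
// next to the other RepositoryDocument setters (illustrative only):
//
//   applyMetadata(rd, document.getMetaData());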
}
| connectors/gridfs/connector/src/main/java/org/apache/manifoldcf/crawler/connectors/gridfs/GridFSRepositoryConnector.java | /**
* Copyright 2014 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.manifoldcf.crawler.connectors.gridfs;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.mongodb.DBTCPConnector;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.InputStream;
import java.net.UnknownHostException;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.manifoldcf.agents.interfaces.RepositoryDocument;
import org.apache.manifoldcf.agents.interfaces.ServiceInterruption;
import org.apache.manifoldcf.core.interfaces.ConfigParams;
import org.apache.manifoldcf.core.interfaces.Specification;
import org.apache.manifoldcf.core.interfaces.IHTTPOutput;
import org.apache.manifoldcf.core.interfaces.IPasswordMapperActivity;
import org.apache.manifoldcf.core.interfaces.IPostParameters;
import org.apache.manifoldcf.core.interfaces.IThreadContext;
import org.apache.manifoldcf.core.interfaces.ManifoldCFException;
import org.apache.manifoldcf.crawler.connectors.BaseRepositoryConnector;
import org.apache.manifoldcf.crawler.interfaces.IProcessActivity;
import org.apache.manifoldcf.crawler.interfaces.ISeedingActivity;
import org.apache.manifoldcf.crawler.interfaces.IExistingVersions;
import org.apache.manifoldcf.crawler.system.Logging;
import org.bson.types.ObjectId;
/**
*
* @author molgun
*/
public class GridFSRepositoryConnector extends BaseRepositoryConnector {
/**
* Activity name for the activity record.
*/
protected static final String ACTIVITY_FETCH = "fetch";
/**
* Server name for declaring bin name.
*/
protected static final String SERVER = "MongoDB - GridFS";
/**
* Session expiration milliseconds.
*/
protected static final long SESSION_EXPIRATION_MILLISECONDS = 30000L;
/**
* Endpoint username.
*/
protected String username = null;
/**
* Endpoint password.
*/
protected String password = null;
/**
* Endpoint host.
*/
protected String host = null;
/**
* Endpoint port.
*/
protected String port = null;
/**
* Endpoint db.
*/
protected String db = null;
/**
* Endpoint bucket.
*/
protected String bucket = null;
/**
* Endpoint url.
*/
protected String url = null;
/**
* Endpoint acl.
*/
protected String acl = null;
/**
* Endpoint denyAcl.
*/
protected String denyAcl = null;
/**
* MongoDB session.
*/
protected DB session = null;
/**
* Last session fetch time.
*/
protected long lastSessionFetch = -1L;
/**
* Forward to the javascript to check the configuration parameters.
*/
private static final String EDIT_CONFIG_HEADER_FORWARD = "editConfiguration.js";
/**
* Forward to the HTML template to view the configuration parameters.
*/
private static final String VIEW_CONFIG_FORWARD = "viewConfiguration.html";
/**
* Forward to the HTML template to edit the configuration parameters.
*/
private static final String EDIT_CONFIG_FORWARD_SERVER = "editConfiguration_Server.html";
/**
* GridFS server tab name.
*/
private static final String GRIDFS_SERVER_TAB_RESOURCE = "GridFSConnector.Server";
/**
* GridFS credentials tab name.
*/
private static final String GRIDFS_CREDENTIALS_TAB_RESOURCE = "GridFSConnector.Credentials";
/**
* Tab name parameter for managing the view of the Web UI.
*/
private static final String TAB_NAME_PARAM = "TabName";
/**
* Constructor.
*/
public GridFSRepositoryConnector() {
super();
}
/**
* Get the bin names for a document identifier. The bin name describes the queue
* to which the document will be assigned for fetch-rate throttling purposes; this
* connector places every document in a single server-wide bin.
*
* @param documentIdentifier is the document identifier.
* @return the set of bin names.
*/
@Override
public String[] getBinNames(String documentIdentifier) {
return new String[]{SERVER};
}
/**
* Tell the world what model this connector uses for addSeedDocuments().
* This must return a model value as specified above. The connector does not
* have to be connected for this method to be called.
*
* @return the model type value.
*/
@Override
public int getConnectorModel() {
return super.getConnectorModel();
}
/**
* Return the list of activities that this connector supports (i.e. writes
* into the log). The connector does not have to be connected for this
* method to be called.
*
* @return the list.
*/
@Override
public String[] getActivitiesList() {
return new String[]{ACTIVITY_FETCH};
}
/**
* Connect.
*
* @param configParams is the set of configuration parameters, which in this
* case describe the MongoDB endpoint, bucket, and metadata field names.
*/
@Override
public void connect(ConfigParams configParams) {
super.connect(configParams);
username = params.getParameter(GridFSConstants.USERNAME_PARAM);
password = params.getParameter(GridFSConstants.PASSWORD_PARAM);
host = params.getParameter(GridFSConstants.HOST_PARAM);
port = params.getParameter(GridFSConstants.PORT_PARAM);
db = params.getParameter(GridFSConstants.DB_PARAM);
bucket = params.getParameter(GridFSConstants.BUCKET_PARAM);
url = params.getParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM);
acl = params.getParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM);
denyAcl = params.getParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM);
}
/**
* Test the connection. Returns a string describing the connection
* integrity.
*
* @return the connection's status as a displayable string.
* @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
*/
@Override
public String check() throws ManifoldCFException {
try {
getSession();
if (session != null) {
Mongo currentMongoSession = session.getMongo();
DBTCPConnector currentTCPConnection = currentMongoSession.getConnector();
boolean status = currentTCPConnection.isOpen();
if (status) {
session.getMongo().close();
session = null;
return super.check();
} else {
session = null;
}
}
return "Not connected.";
} catch (ManifoldCFException e) {
return e.getMessage();
}
}
/**
* Close the connection. Call this before discarding this instance of the
* repository connector.
*
* @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
*/
@Override
public void disconnect() throws ManifoldCFException {
if (session != null) {
try {
session.getMongo().getConnector().close();
} catch (Exception e) {
Logging.connectors.error("GridFS: Error when trying to disconnect: " + e.getMessage());
throw new ManifoldCFException("GridFS: Error when trying to disconnect: " + e.getMessage(), e);
}
session = null;
lastSessionFetch = -1L;
username = null;
password = null;
host = null;
port = null;
db = null;
bucket = null;
url = null;
acl = null;
denyAcl = null;
}
}
/**
* This method is periodically called for all connectors that are connected
* but not in active use.
*
* @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
*/
@Override
public void poll() throws ManifoldCFException {
if (lastSessionFetch == -1L) {
return;
}
long currentTime = System.currentTimeMillis();
if (currentTime >= lastSessionFetch + SESSION_EXPIRATION_MILLISECONDS) {
if (session != null) {
session.getMongo().getConnector().close();
session = null;
}
lastSessionFetch = -1L;
}
}
/**
* This method is called to assess whether this connector instance should
* actually be counted as being connected.
*
* @return true if the connector instance is actually connected.
*/
@Override
public boolean isConnected() {
if (session == null) {
return false;
}
Mongo currentMongoSession = session.getMongo();
DBTCPConnector currentTCPConnection = currentMongoSession.getConnector();
return currentTCPConnection.isOpen();
}
/**
* Get the maximum number of documents to amalgamate together into one
* batch, for this connector.
*
* @return the maximum number. 0 indicates "unlimited".
*/
@Override
public int getMaxDocumentRequest() {
return super.getMaxDocumentRequest();
}
/**
* Return the list of relationship types that this connector recognizes.
*
* @return the list.
*/
@Override
public String[] getRelationshipTypes() {
return super.getRelationshipTypes();
}
/** Queue "seed" documents. Seed documents are the starting places for crawling activity. Documents
* are seeded when this method calls appropriate methods in the passed in ISeedingActivity object.
*
* This method can choose to find repository changes that happen only during the specified time interval.
* The seeds recorded by this method will be viewed by the framework based on what the
* getConnectorModel() method returns.
*
* It is not a big problem if the connector chooses to create more seeds than are
* strictly necessary; it is merely a question of overall work required.
*
* The end time and seeding version string passed to this method may be interpreted for greatest efficiency.
* For continuous crawling jobs, this method will
* be called once, when the job starts, and at various periodic intervals as the job executes.
*
* When a job's specification is changed, the framework automatically resets the seeding version string to null. The
* seeding version string may also be set to null on each job run, depending on the connector model returned by
* getConnectorModel().
*
* Note that it is always ok to send MORE documents rather than less to this method.
* The connector will be connected before this method can be called.
*@param activities is the interface this method should use to perform whatever framework actions are desired.
*@param spec is a document specification (that comes from the job).
*@param seedTime is the end of the time range of documents to consider, exclusive.
*@param lastSeedVersion is the last seeding version string for this job, or null if the job has no previous seeding version string.
*@param jobMode is an integer describing how the job is being run, whether continuous or once-only.
*@return an updated seeding version string, to be stored with the job.
*/
@Override
public String addSeedDocuments(ISeedingActivity activities, Specification spec,
String lastSeedVersion, long seedTime, int jobMode)
throws ManifoldCFException, ServiceInterruption {
getSession();
DBCollection fsFiles = session.getCollection(
bucket + GridFSConstants.COLLECTION_SEPERATOR + GridFSConstants.FILES_COLLECTION_NAME
);
DBCursor dnc = fsFiles.find();
while (dnc.hasNext()) {
DBObject dbo = dnc.next();
String _id = dbo.get("_id").toString();
activities.addSeedDocument(_id);
if (Logging.connectors.isDebugEnabled()) {
Logging.connectors.debug("GridFS: Document _id = " + _id + " added to queue");
}
}
return "";
}
/** Process a set of documents.
* This is the method that should cause each document to be fetched, processed, and the results either added
* to the queue of documents for the current job, and/or entered into the incremental ingestion manager.
* The document specification allows this class to filter what is done based on the job.
* The connector will be connected before this method can be called.
*@param documentIdentifiers is the set of document identifiers to process.
*@param statuses are the currently-stored document versions for each document in the set of document identifiers
* passed in above.
*@param activities is the interface this method should use to queue up new document references
* and ingest documents.
*@param jobMode is an integer describing how the job is being run, whether continuous or once-only.
*@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one.
*/
@Override
public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec,
IProcessActivity activities, int jobMode, boolean usesDefaultAuthority)
throws ManifoldCFException, ServiceInterruption {
for (String documentIdentifier : documentIdentifiers) {
String versionString;
GridFS gfs;
GridFSDBFile document;
getSession();
String _id = documentIdentifier;
gfs = new GridFS(session, bucket);
document = gfs.findOne(new ObjectId(_id));
if (document == null) {
activities.deleteDocument(documentIdentifier);
continue;
} else {
DBObject metadata = document.getMetaData();
versionString = document.getMD5() + "+" + metadata != null
? Integer.toString(metadata.hashCode())
: StringUtils.EMPTY;
}
if (versionString.length() == 0 || activities.checkDocumentNeedsReindexing(documentIdentifier,versionString)) {
long startTime = System.currentTimeMillis();
String errorCode = "OK";
String errorDesc = null;
String version = versionString;
RepositoryDocument rd = new RepositoryDocument();
if (Logging.connectors.isDebugEnabled()) {
Logging.connectors.debug("GridFS: Processing document _id = " + _id);
}
DBObject metadata = document.getMetaData();
if (metadata == null) {
Logging.connectors.warn("GridFS: Document " + _id + " has a null metadata - skipping.");
activities.noDocument(_id,version);
continue;
}
String urlValue = document.getMetaData().get(this.url) == null
? StringUtils.EMPTY
: document.getMetaData().get(this.url).toString();
if (!StringUtils.isEmpty(urlValue)) {
boolean validURL;
try {
new java.net.URI(urlValue);
validURL = true;
} catch (java.net.URISyntaxException e) {
validURL = false;
}
if (validURL) {
long fileLenght = document.getLength();
InputStream is = document.getInputStream();
try {
Date indexingDate = new Date();
rd.setBinary(is, fileLenght);
rd.setCreatedDate(document.getUploadDate());
rd.setFileName(document.getFilename());
rd.setIndexingDate(indexingDate);
rd.setMimeType(document.getContentType());
String[] aclsArray = null;
String[] denyAclsArray = null;
if (acl != null) {
try {
Object aclObject = document.getMetaData().get(acl);
if (aclObject != null) {
List<String> acls = (List<String>) aclObject;
aclsArray = (String[]) acls.toArray();
}
} catch (ClassCastException e) {
// This is bad because security will fail
Logging.connectors.warn("GridFS: Document " + _id + " metadata ACL field doesn't contain List<String> type.");
throw new ManifoldCFException("Security decoding error: "+e.getMessage(),e);
}
}
if (denyAcl != null) {
try {
Object denyAclObject = document.getMetaData().get(denyAcl);
if (denyAclObject != null) {
List<String> denyAcls = (List<String>) denyAclObject;
denyAcls.add(GLOBAL_DENY_TOKEN);
denyAclsArray = (String[]) denyAcls.toArray();
}
} catch (ClassCastException e) {
// This is bad because security will fail
Logging.connectors.warn("GridFS: Document " + _id + " metadata DenyACL field doesn't contain List<String> type.");
throw new ManifoldCFException("Security decoding error: "+e.getMessage(),e);
}
}
rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT,aclsArray,denyAclsArray);
try {
activities.ingestDocumentWithException(_id, version, urlValue, rd);
} catch (IOException e) {
handleIOException(e);
}
} finally {
try {
is.close();
} catch (IOException e) {
handleIOException(e);
}
}
gfs.getDB().getMongo().getConnector().close();
session = null;
activities.recordActivity(startTime, ACTIVITY_FETCH,
fileLenght, _id, errorCode, errorDesc, null);
} else {
Logging.connectors.warn("GridFS: Document " + _id + " has a invalid URL: " + urlValue + " - skipping.");
activities.noDocument(_id,version);
}
} else {
Logging.connectors.warn("GridFS: Document " + _id + " has a null URL - skipping.");
activities.noDocument(_id,version);
}
}
}
}
protected static void handleIOException(IOException e) throws ManifoldCFException, ServiceInterruption {
if (e instanceof InterruptedIOException) {
throw new ManifoldCFException(e.getMessage(), e, ManifoldCFException.INTERRUPTED);
} else {
throw new ManifoldCFException(e.getMessage(), e);
}
}
/**
* Output the configuration header section. This method is called in the
* head section of the connector's configuration page. Its purpose is to add
* the required tabs to the list, and to output any javascript methods that
* might be needed by the configuration editing HTML. The connector does not
* need to be connected for this method to be called.
*
* @param threadContext is the local thread context.
* @param out is the output to which any HTML should be sent.
* @param parameters are the configuration parameters, as they currently
* exist, for this connection being configured.
* @param tabsArray is an array of tab names. Add to this array any tab
* names that are specific to the connector.
* @throws org.apache.manifoldcf.core.interfaces.ManifoldCFException
* @throws java.io.IOException
*/
@Override
public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray) throws ManifoldCFException, IOException {
tabsArray.add(Messages.getString(locale, GRIDFS_SERVER_TAB_RESOURCE));
tabsArray.add(Messages.getString(locale, GRIDFS_CREDENTIALS_TAB_RESOURCE));
Map<String, String> paramMap = new HashMap<String, String>();
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIG_HEADER_FORWARD, paramMap, true);
}
/**
* Output the configuration body section. This method is called in the body
* section of the connector's configuration page. Its purpose is to present
* the required form elements for editing. The coder can presume that the
* HTML that is output from this configuration will be within appropriate
* <html>, <body>, and <form> tags. The name of the form is always
* "editconnection". The connector does not need to be connected for this
* method to be called.
*
* @param threadContext is the local thread context.
* @param out is the output to which any HTML should be sent.
* @param parameters are the configuration parameters, as they currently
* exist, for this connection being configured.
* @param tabName is the current tab name.
*/
@Override
public void outputConfigurationBody(IThreadContext threadContext,
IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException {
Map<String, String> paramMap = new HashMap<String, String>();
paramMap.put(TAB_NAME_PARAM, tabName);
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, EDIT_CONFIG_FORWARD_SERVER, paramMap, true);
}
/**
* Process a configuration post. This method is called at the start of the
* connector's configuration page, whenever there is a possibility that form
* data for a connection has been posted. Its purpose is to gather form
* information and modify the configuration parameters accordingly. The name
* of the posted form is always "editconnection". The connector does not
* need to be connected for this method to be called.
*
* @param threadContext is the local thread context.
* @param variableContext is the set of variables available from the post,
* including binary file post information.
* @param parameters are the configuration parameters, as they currently
* exist, for this connection being configured.
* @return null if all is well, or a string error message if there is an
* error that should prevent saving of the connection (and cause a
* redirection to an error page).
*/
@Override
public String processConfigurationPost(IThreadContext threadContext,
IPostParameters variableContext, Locale locale, ConfigParams parameters)
throws ManifoldCFException {
String username = variableContext.getParameter(GridFSConstants.USERNAME_PARAM);
if (username != null) {
parameters.setParameter(GridFSConstants.USERNAME_PARAM, username);
}
String password = variableContext.getParameter(GridFSConstants.PASSWORD_PARAM);
if (password != null) {
parameters.setParameter(GridFSConstants.PASSWORD_PARAM, variableContext.mapKeyToPassword(password));
}
String db = variableContext.getParameter(GridFSConstants.DB_PARAM);
if (db != null) {
parameters.setParameter(GridFSConstants.DB_PARAM, db);
}
String bucket = variableContext.getParameter(GridFSConstants.BUCKET_PARAM);
if (bucket != null) {
parameters.setParameter(GridFSConstants.BUCKET_PARAM, bucket);
}
String port = variableContext.getParameter(GridFSConstants.PORT_PARAM);
if (port != null) {
parameters.setParameter(GridFSConstants.PORT_PARAM, port);
}
String host = variableContext.getParameter(GridFSConstants.HOST_PARAM);
if (host != null) {
parameters.setParameter(GridFSConstants.HOST_PARAM, host);
}
String url = variableContext.getParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM);
if (url != null) {
parameters.setParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM, url);
}
String acl = variableContext.getParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM);
if (acl != null) {
parameters.setParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM, acl);
}
String denyAcl = variableContext.getParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM);
if (denyAcl != null) {
parameters.setParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM, denyAcl);
}
return null;
}
/**
* View configuration. This method is called in the body section of the
* connector's view configuration page. Its purpose is to present the
* connection information to the user. The coder can presume that the HTML
* that is output from this configuration will be within appropriate <html>
* and <body> tags. The connector does not need to be connected for this
* method to be called.
*
* @param threadContext is the local thread context.
* @param out is the output to which any HTML should be sent.
* @param parameters are the configuration parameters, as they currently
* exist, for this connection being configured.
*/
@Override
public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException {
Map<String, String> paramMap = new HashMap<String, String>();
fillInServerParameters(paramMap, out, parameters);
Messages.outputResourceWithVelocity(out, locale, VIEW_CONFIG_FORWARD, paramMap, true);
}
/**
* Setup a session.
*
* @throws ManifoldCFException
*/
protected void getSession() throws ManifoldCFException {
if (session == null) {
if (StringUtils.isEmpty(db) || StringUtils.isEmpty(bucket)) {
throw new ManifoldCFException("GridFS: Database or bucket name cannot be empty.");
}
if (StringUtils.isEmpty(url)) {
throw new ManifoldCFException("GridFS: Metadata URL field cannot be empty.");
}
if (StringUtils.isEmpty(host) && StringUtils.isEmpty(port)) {
try {
session = new MongoClient().getDB(db);
} catch (UnknownHostException ex) {
throw new ManifoldCFException("GridFS: Default host is not found. Does mongod process run?" + ex.getMessage(), ex);
}
} else if (!StringUtils.isEmpty(host) && StringUtils.isEmpty(port)) {
try {
session = new MongoClient(host).getDB(db);
} catch (UnknownHostException ex) {
                    throw new ManifoldCFException("GridFS: Given host information is not valid or mongod process doesn't run: " + ex.getMessage(), ex);
}
} else if (!StringUtils.isEmpty(host) && !StringUtils.isEmpty(port)) {
try {
int integerPort = Integer.parseInt(port);
session = new MongoClient(host, integerPort).getDB(db);
} catch (UnknownHostException ex) {
                    throw new ManifoldCFException("GridFS: Given information is not valid or mongod process doesn't run: " + ex.getMessage(), ex);
} catch (NumberFormatException ex) {
throw new ManifoldCFException("GridFS: Given port is not valid number. " + ex.getMessage(), ex);
}
} else if (StringUtils.isEmpty(host) && !StringUtils.isEmpty(port)) {
try {
int integerPort = Integer.parseInt(port);
session = new MongoClient(host, integerPort).getDB(db);
} catch (UnknownHostException ex) {
                    throw new ManifoldCFException("GridFS: Given information is not valid or mongod process doesn't run: " + ex.getMessage(), ex);
} catch (NumberFormatException ex) {
throw new ManifoldCFException("GridFS: Given port is not valid number. " + ex.getMessage(), ex);
}
}
if (!StringUtils.isEmpty(username) && !StringUtils.isEmpty(password)) {
boolean auth = session.authenticate(username, password.toCharArray());
if (!auth) {
                    throw new ManifoldCFException("GridFS: Given database username and password don't match.");
}
}
lastSessionFetch = System.currentTimeMillis();
}
}
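    // Editor's note (illustrative comment, not in the original source): the branches above
    // cover three hypothetical configurations; host/port values here are placeholders only:
    //   - no host, no port  -> new MongoClient()                        (driver default, local mongod)
    //   - host only         -> new MongoClient("mongo.example.com")     (driver default port)
    //   - host and port     -> new MongoClient("mongo.example.com", 27017)
    // followed by optional authentication when both username and password are configured.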
/**
* Fill in a Server tab configuration parameter map for calling a Velocity
* template.
*
* @param paramMap is the map to fill in
* @param parameters is the current set of configuration parameters
*/
public void fillInServerParameters(Map<String, String> paramMap, IPasswordMapperActivity mapper, ConfigParams parameters) {
String usernameParam = parameters.getParameter(GridFSConstants.USERNAME_PARAM);
paramMap.put(GridFSConstants.USERNAME_PARAM, usernameParam);
String passwordParam = parameters.getParameter(GridFSConstants.PASSWORD_PARAM);
passwordParam = mapper.mapKeyToPassword(passwordParam);
paramMap.put(GridFSConstants.PASSWORD_PARAM, passwordParam);
String dbParam = parameters.getParameter(GridFSConstants.DB_PARAM);
if (StringUtils.isEmpty(dbParam)) {
dbParam = GridFSConstants.DEFAULT_DB_NAME;
}
paramMap.put(GridFSConstants.DB_PARAM, dbParam);
String bucketParam = parameters.getParameter(GridFSConstants.BUCKET_PARAM);
if (StringUtils.isEmpty(bucketParam)) {
bucketParam = GridFSConstants.DEFAULT_BUCKET_NAME;
}
paramMap.put(GridFSConstants.BUCKET_PARAM, bucketParam);
String hostParam = parameters.getParameter(GridFSConstants.HOST_PARAM);
paramMap.put(GridFSConstants.HOST_PARAM, hostParam);
String portParam = parameters.getParameter(GridFSConstants.PORT_PARAM);
paramMap.put(GridFSConstants.PORT_PARAM, portParam);
String urlParam = parameters.getParameter(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM);
paramMap.put(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM, urlParam);
String aclParam = parameters.getParameter(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM);
paramMap.put(GridFSConstants.ACL_RETURN_FIELD_NAME_PARAM, aclParam);
String denyAclParam = parameters.getParameter(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM);
paramMap.put(GridFSConstants.DENY_ACL_RETURN_FIELD_NAME_PARAM, denyAclParam);
}
/**
* Special column names, as far as document queries are concerned
*/
    protected static HashMap<String, String> documentKnownColumns;
    static {
        documentKnownColumns = new HashMap<String, String>();
documentKnownColumns.put(GridFSConstants.DEFAULT_ID_FIELD_NAME, "");
documentKnownColumns.put(GridFSConstants.URL_RETURN_FIELD_NAME_PARAM, "");
}
/**
* Apply metadata to a repository document.
*
* @param rd is the repository document to apply the metadata to.
* @param metadataMap is the resultset row to use to get the metadata. All
* non-special columns from this row will be considered to be metadata.
*/
protected void applyMetadata(RepositoryDocument rd, DBObject metadataMap)
throws ManifoldCFException {
// Cycle through the document's fields
        Iterator<String> iter = metadataMap.keySet().iterator();
        while (iter.hasNext()) {
            String fieldName = iter.next();
if (documentKnownColumns.get(fieldName) == null) {
// Consider this field to contain metadata.
// We can only accept non-binary metadata at this time.
Object metadata = metadataMap.get(fieldName);
if (!(metadata instanceof String)) {
throw new ManifoldCFException("Metadata field '" + fieldName + "' must be convertible to a string.");
}
rd.addField(fieldName, metadata.toString());
}
}
}
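    // Editor's note (illustrative comment, not in the original source): for a hypothetical
    // GridFS metadata document such as { "url" : "http://example.com/doc", "author" : "jdoe" },
    // applyMetadata() would add "author" as a repository document field, while the special
    // columns registered in documentKnownColumns (the id and url return fields) are skipped.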
}
| Hook up gridfs connector
git-svn-id: 2bfa52b4cc115db473b9652e5d3efdc3a88ca1a3@1630087 13f79535-47bb-0310-9956-ffa450edef68
| connectors/gridfs/connector/src/main/java/org/apache/manifoldcf/crawler/connectors/gridfs/GridFSRepositoryConnector.java | Hook up gridfs connector |
|
Java | apache-2.0 | 31d4194a2dc8912656a40394a8556e8619b363db | 0 | linkedin/pinot,linkedin/pinot,linkedin/pinot,linkedin/pinot,linkedin/pinot
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.core.query.aggregation.function;
import com.google.common.base.Preconditions;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.theta.Intersection;
import org.apache.datasketches.theta.SetOperation;
import org.apache.datasketches.theta.SetOperationBuilder;
import org.apache.datasketches.theta.Sketch;
import org.apache.datasketches.theta.Union;
import org.apache.pinot.common.function.AggregationFunctionType;
import org.apache.pinot.common.utils.DataSchema;
import org.apache.pinot.core.common.BlockValSet;
import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluator;
import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluatorProvider;
import org.apache.pinot.core.query.aggregation.AggregationResultHolder;
import org.apache.pinot.core.query.aggregation.ObjectAggregationResultHolder;
import org.apache.pinot.core.query.aggregation.ThetaSketchParams;
import org.apache.pinot.core.query.aggregation.groupby.GroupByResultHolder;
import org.apache.pinot.core.query.aggregation.groupby.ObjectGroupByResultHolder;
import org.apache.pinot.core.query.request.context.ExpressionContext;
import org.apache.pinot.core.query.request.context.FilterContext;
import org.apache.pinot.core.query.request.context.predicate.Predicate;
import org.apache.pinot.core.query.request.context.utils.QueryContextConverterUtils;
import org.apache.pinot.spi.data.FieldSpec;
import org.apache.pinot.sql.parsers.CalciteSqlParser;
/**
* Implementation of {@link AggregationFunction} to perform the distinct count aggregation using
* Theta Sketches.
* <p>TODO: For performance concern, use {@code List<Sketch>} as the intermediate result.
*/
@SuppressWarnings("Duplicates")
public class DistinctCountThetaSketchAggregationFunction implements AggregationFunction<Map<String, Sketch>, Long> {
private final ExpressionContext _thetaSketchColumn;
private final ThetaSketchParams _thetaSketchParams;
private final SetOperationBuilder _setOperationBuilder;
private final List<ExpressionContext> _inputExpressions;
private final FilterContext _postAggregationExpression;
private final Map<Predicate, PredicateInfo> _predicateInfoMap;
/**
* Constructor for the class.
* @param arguments List of parameters as arguments strings. At least three arguments are expected:
* <ul>
* <li> Required: First expression is interpreted as theta sketch column to aggregate on. </li>
* <li> Required: Second argument is the thetaSketchParams. </li>
* <li> Optional: Third to penultimate are predicates with LHS and RHS. </li>
* <li> Required: Last expression is the one that will be evaluated to compute final result. </li>
* </ul>
*/
public DistinctCountThetaSketchAggregationFunction(List<ExpressionContext> arguments)
throws SqlParseException {
int numArguments = arguments.size();
// NOTE: This function expects at least 3 arguments: theta-sketch column, parameters, post-aggregation expression.
    Preconditions.checkArgument(numArguments >= 3,
        "DistinctCountThetaSketch expects at least three arguments (theta-sketch column, parameters, post-aggregation expression), got: %s",
        numArguments);
// Initialize the theta-sketch column
_thetaSketchColumn = arguments.get(0);
Preconditions.checkArgument(_thetaSketchColumn.getType() == ExpressionContext.Type.IDENTIFIER,
"First argument of DistinctCountThetaSketch must be identifier (theta-sketch column)");
// Initialize the theta-sketch parameters
ExpressionContext paramsExpression = arguments.get(1);
Preconditions.checkArgument(paramsExpression.getType() == ExpressionContext.Type.LITERAL,
"Second argument of DistinctCountThetaSketch must be literal (parameters)");
_thetaSketchParams = ThetaSketchParams.fromString(paramsExpression.getLiteral());
// Initialize the theta-sketch set operation builder
_setOperationBuilder = getSetOperationBuilder();
// Initialize the input expressions
// NOTE: It is expected to cover the theta-sketch column and the lhs of the predicates.
_inputExpressions = new ArrayList<>();
_inputExpressions.add(_thetaSketchColumn);
// Initialize the post-aggregation expression
// NOTE: It is modeled as a filter
ExpressionContext postAggregationExpression = arguments.get(numArguments - 1);
    Preconditions.checkArgument(postAggregationExpression.getType() == ExpressionContext.Type.LITERAL,
"Last argument of DistinctCountThetaSketch must be literal (post-aggregation expression)");
_postAggregationExpression = QueryContextConverterUtils
.getFilter(CalciteSqlParser.compileToExpression(postAggregationExpression.getLiteral()));
// Initialize the predicate map
_predicateInfoMap = new HashMap<>();
if (numArguments > 3) {
// Predicates are explicitly specified
for (int i = 2; i < numArguments - 1; i++) {
ExpressionContext predicateExpression = arguments.get(i);
Preconditions.checkArgument(predicateExpression.getType() == ExpressionContext.Type.LITERAL,
"Third to second last argument of DistinctCountThetaSketch must be literal (predicate expression)");
Predicate predicate = getPredicate(predicateExpression.getLiteral());
_inputExpressions.add(predicate.getLhs());
_predicateInfoMap.put(predicate, new PredicateInfo(predicate));
}
} else {
// Auto-derive predicates from the post-aggregation expression
Stack<FilterContext> stack = new Stack<>();
stack.push(_postAggregationExpression);
while (!stack.isEmpty()) {
FilterContext filter = stack.pop();
if (filter.getType() == FilterContext.Type.PREDICATE) {
Predicate predicate = filter.getPredicate();
_inputExpressions.add(predicate.getLhs());
_predicateInfoMap.put(predicate, new PredicateInfo(predicate));
} else {
stack.addAll(filter.getChildren());
}
}
}
}
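  // Editor's note (illustrative comment, not in the original source): a hedged sketch of the
  // argument layout described in the constructor javadoc. The column name, predicate strings
  // and parameter syntax below are hypothetical placeholders, not confirmed Pinot syntax:
  //   distinctCountThetaSketch(sketchCol, 'nominalEntries=4096',
  //                            'dimA = ''v1''', 'dimB = ''v2''',
  //                            'dimA = ''v1'' AND dimB = ''v2''')
  // i.e. sketch column first, parameters literal second, optional predicate literals next,
  // and the post-aggregation expression literal last.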
@Override
public AggregationFunctionType getType() {
return AggregationFunctionType.DISTINCTCOUNTTHETASKETCH;
}
@Override
public String getColumnName() {
return AggregationFunctionType.DISTINCTCOUNTTHETASKETCH.getName() + "_" + _thetaSketchColumn;
}
@Override
public String getResultColumnName() {
return AggregationFunctionType.DISTINCTCOUNTTHETASKETCH.getName().toLowerCase() + "(" + _thetaSketchColumn + ")";
}
@Override
public List<ExpressionContext> getInputExpressions() {
return _inputExpressions;
}
@Override
public void accept(AggregationFunctionVisitorBase visitor) {
visitor.visit(this);
}
@Override
public AggregationResultHolder createAggregationResultHolder() {
return new ObjectAggregationResultHolder();
}
@Override
public GroupByResultHolder createGroupByResultHolder(int initialCapacity, int maxCapacity) {
return new ObjectGroupByResultHolder(initialCapacity, maxCapacity);
}
@Override
public void aggregate(int length, AggregationResultHolder aggregationResultHolder,
Map<ExpressionContext, BlockValSet> blockValSetMap) {
Map<Predicate, Union> unionMap = getUnionMap(aggregationResultHolder);
Sketch[] sketches = deserializeSketches(blockValSetMap.get(_thetaSketchColumn).getBytesValuesSV(), length);
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
Predicate predicate = predicateInfo.getPredicate();
BlockValSet blockValSet = blockValSetMap.get(predicate.getLhs());
FieldSpec.DataType valueType = blockValSet.getValueType();
PredicateEvaluator predicateEvaluator = predicateInfo.getPredicateEvaluator(valueType);
Union union = unionMap.get(predicate);
switch (valueType) {
case INT:
int[] intValues = blockValSet.getIntValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(intValues[i])) {
union.update(sketches[i]);
}
}
break;
case LONG:
long[] longValues = blockValSet.getLongValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(longValues[i])) {
union.update(sketches[i]);
}
}
break;
case FLOAT:
float[] floatValues = blockValSet.getFloatValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(floatValues[i])) {
union.update(sketches[i]);
}
}
break;
case DOUBLE:
double[] doubleValues = blockValSet.getDoubleValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(doubleValues[i])) {
union.update(sketches[i]);
}
}
break;
case STRING:
String[] stringValues = blockValSet.getStringValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(stringValues[i])) {
union.update(sketches[i]);
}
}
break;
case BYTES:
byte[][] bytesValues = blockValSet.getBytesValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(bytesValues[i])) {
union.update(sketches[i]);
}
}
break;
default:
throw new IllegalStateException();
}
}
}
@Override
public void aggregateGroupBySV(int length, int[] groupKeyArray, GroupByResultHolder groupByResultHolder,
Map<ExpressionContext, BlockValSet> blockValSetMap) {
Sketch[] sketches = deserializeSketches(blockValSetMap.get(_thetaSketchColumn).getBytesValuesSV(), length);
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
Predicate predicate = predicateInfo.getPredicate();
BlockValSet blockValSet = blockValSetMap.get(predicate.getLhs());
FieldSpec.DataType valueType = blockValSet.getValueType();
PredicateEvaluator predicateEvaluator = predicateInfo.getPredicateEvaluator(valueType);
switch (valueType) {
case INT:
int[] intValues = blockValSet.getIntValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(intValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case LONG:
long[] longValues = blockValSet.getLongValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(longValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case FLOAT:
float[] floatValues = blockValSet.getFloatValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(floatValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case DOUBLE:
double[] doubleValues = blockValSet.getDoubleValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(doubleValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case STRING:
String[] stringValues = blockValSet.getStringValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(stringValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case BYTES:
byte[][] bytesValues = blockValSet.getBytesValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(bytesValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
          }
          break;
        default:
throw new IllegalStateException();
}
}
}
@Override
public void aggregateGroupByMV(int length, int[][] groupKeysArray, GroupByResultHolder groupByResultHolder,
Map<ExpressionContext, BlockValSet> blockValSetMap) {
Sketch[] sketches = deserializeSketches(blockValSetMap.get(_thetaSketchColumn).getBytesValuesSV(), length);
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
Predicate predicate = predicateInfo.getPredicate();
BlockValSet blockValSet = blockValSetMap.get(predicate.getLhs());
FieldSpec.DataType valueType = blockValSet.getValueType();
PredicateEvaluator predicateEvaluator = predicateInfo.getPredicateEvaluator(valueType);
switch (valueType) {
case INT:
int[] intValues = blockValSet.getIntValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(intValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case LONG:
long[] longValues = blockValSet.getLongValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(longValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case FLOAT:
float[] floatValues = blockValSet.getFloatValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(floatValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case DOUBLE:
double[] doubleValues = blockValSet.getDoubleValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(doubleValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case STRING:
String[] stringValues = blockValSet.getStringValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(stringValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case BYTES:
byte[][] bytesValues = blockValSet.getBytesValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(bytesValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
default:
throw new IllegalStateException();
}
}
}
@Override
public Map<String, Sketch> extractAggregationResult(AggregationResultHolder aggregationResultHolder) {
Map<Predicate, Union> unionMap = aggregationResultHolder.getResult();
if (unionMap == null || unionMap.isEmpty()) {
return Collections.emptyMap();
}
Map<String, Sketch> result = new HashMap<>();
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
result.put(predicateInfo.getStringPredicate(), unionMap.get(predicateInfo.getPredicate()).getResult());
}
return result;
}
@Override
public Map<String, Sketch> extractGroupByResult(GroupByResultHolder groupByResultHolder, int groupKey) {
Map<Predicate, Union> unionMap = groupByResultHolder.getResult(groupKey);
if (unionMap == null || unionMap.isEmpty()) {
return Collections.emptyMap();
}
Map<String, Sketch> result = new HashMap<>();
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
result.put(predicateInfo.getStringPredicate(), unionMap.get(predicateInfo.getPredicate()).getResult());
}
return result;
}
@Override
public Map<String, Sketch> merge(Map<String, Sketch> intermediateResult1, Map<String, Sketch> intermediateResult2) {
if (intermediateResult1 == null || intermediateResult1.isEmpty()) {
return intermediateResult2;
} else if (intermediateResult2 == null || intermediateResult2.isEmpty()) {
return intermediateResult1;
}
// NOTE: Here we parse the map keys to Predicate to handle the non-standard predicate string returned from server
// side for backward-compatibility.
// TODO: Remove the extra parsing after releasing 0.5.0
Map<Predicate, Union> unionMap = getDefaultUnionMap();
for (Map.Entry<String, Sketch> entry : intermediateResult1.entrySet()) {
Predicate predicate = getPredicate(entry.getKey());
unionMap.get(predicate).update(entry.getValue());
}
for (Map.Entry<String, Sketch> entry : intermediateResult2.entrySet()) {
Predicate predicate = getPredicate(entry.getKey());
unionMap.get(predicate).update(entry.getValue());
}
Map<String, Sketch> mergedResult = new HashMap<>();
for (Map.Entry<Predicate, Union> entry : unionMap.entrySet()) {
mergedResult.put(entry.getKey().toString(), entry.getValue().getResult());
}
return mergedResult;
}
@Override
public boolean isIntermediateResultComparable() {
return false;
}
@Override
public DataSchema.ColumnDataType getIntermediateResultColumnType() {
return DataSchema.ColumnDataType.OBJECT;
}
@Override
public DataSchema.ColumnDataType getFinalResultColumnType() {
return DataSchema.ColumnDataType.LONG;
}
@Override
public Long extractFinalResult(Map<String, Sketch> intermediateResult) {
Sketch finalSketch = extractFinalSketch(intermediateResult);
return Math.round(finalSketch.getEstimate());
}
private Predicate getPredicate(String predicateString) {
FilterContext filter;
try {
filter = QueryContextConverterUtils.getFilter(CalciteSqlParser.compileToExpression(predicateString));
} catch (SqlParseException e) {
throw new IllegalArgumentException("Invalid predicate string: " + predicateString);
}
// TODO: Add support for complex predicates with AND/OR.
Preconditions.checkArgument(filter.getType() == FilterContext.Type.PREDICATE, "Invalid predicate string: %s",
predicateString);
return filter.getPredicate();
}
private Map<Predicate, Union> getUnionMap(AggregationResultHolder aggregationResultHolder) {
Map<Predicate, Union> unionMap = aggregationResultHolder.getResult();
if (unionMap == null) {
unionMap = getDefaultUnionMap();
aggregationResultHolder.setValue(unionMap);
}
return unionMap;
}
private Map<Predicate, Union> getUnionMap(GroupByResultHolder groupByResultHolder, int groupKey) {
Map<Predicate, Union> unionMap = groupByResultHolder.getResult(groupKey);
if (unionMap == null) {
unionMap = getDefaultUnionMap();
groupByResultHolder.setValueForKey(groupKey, unionMap);
}
return unionMap;
}
private Map<Predicate, Union> getDefaultUnionMap() {
Map<Predicate, Union> unionMap = new HashMap<>();
for (Predicate predicate : _predicateInfoMap.keySet()) {
unionMap.put(predicate, _setOperationBuilder.buildUnion());
}
return unionMap;
}
private Sketch[] deserializeSketches(byte[][] serializedSketches, int length) {
Sketch[] sketches = new Sketch[length];
for (int i = 0; i < length; i++) {
sketches[i] = Sketch.wrap(Memory.wrap(serializedSketches[i]));
}
return sketches;
}
/**
* Evaluates the theta-sketch post-aggregation expression, which is composed by performing AND/OR on top of the
* pre-defined predicates. These predicates are evaluated during the aggregation phase, and the cached results are
* passed to this method to be used when evaluating the expression.
*
* @param postAggregationExpression Post-aggregation expression to evaluate (modeled as a filter)
* @param sketchMap Precomputed sketches for predicates that are part of the expression.
* @return Overall evaluated sketch for the expression.
*/
private Sketch evalPostAggregationExpression(FilterContext postAggregationExpression,
Map<Predicate, Sketch> sketchMap) {
switch (postAggregationExpression.getType()) {
case AND:
Intersection intersection = _setOperationBuilder.buildIntersection();
for (FilterContext child : postAggregationExpression.getChildren()) {
intersection.update(evalPostAggregationExpression(child, sketchMap));
}
return intersection.getResult();
case OR:
Union union = _setOperationBuilder.buildUnion();
for (FilterContext child : postAggregationExpression.getChildren()) {
union.update(evalPostAggregationExpression(child, sketchMap));
}
return union.getResult();
case PREDICATE:
return sketchMap.get(postAggregationExpression.getPredicate());
default:
throw new IllegalStateException();
}
}
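  // Editor's note (illustrative comment, not in the original source): for a hypothetical
  // post-aggregation expression such as "dimA = 'x' AND (dimB = 'y' OR dimC = 'z')", the
  // recursion above builds an Intersection over the AND children and a Union over the OR
  // children, looking up the cached per-predicate sketch at each leaf of the FilterContext tree.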
/**
* Extracts the final sketch from the intermediate result by applying the post-aggregation expression on it.
*
* @param intermediateResult Intermediate result
* @return Final Sketch obtained by computing the post-aggregation expression on intermediate result
*/
protected Sketch extractFinalSketch(Map<String, Sketch> intermediateResult) {
// NOTE: Here we parse the map keys to Predicate to handle the non-standard predicate string returned from server
// side for backward-compatibility.
// TODO: Remove the extra parsing after releasing 0.5.0
Map<Predicate, Sketch> sketchMap = new HashMap<>();
for (Map.Entry<String, Sketch> entry : intermediateResult.entrySet()) {
Predicate predicate = getPredicate(entry.getKey());
sketchMap.put(predicate, entry.getValue());
}
return evalPostAggregationExpression(_postAggregationExpression, sketchMap);
}
/**
* Returns the theta-sketch SetOperation builder properly configured.
* Currently, only setting of nominalEntries is supported.
* @return SetOperationBuilder
*/
private SetOperationBuilder getSetOperationBuilder() {
return _thetaSketchParams == null ? SetOperation.builder()
: SetOperation.builder().setNominalEntries(_thetaSketchParams.getNominalEntries());
}
/**
* Helper class to store predicate related information:
* <ul>
* <li>Predicate</li>
* <li>String representation of the predicate</li>
* <li>Predicate evaluator</li>
* </ul>
*/
private static class PredicateInfo {
final Predicate _predicate;
final String _stringPredicate;
PredicateEvaluator _predicateEvaluator;
PredicateInfo(Predicate predicate) {
_predicate = predicate;
_stringPredicate = predicate.toString();
_predicateEvaluator = null; // Initialized lazily
}
Predicate getPredicate() {
return _predicate;
}
String getStringPredicate() {
return _stringPredicate;
}
/**
* Since PredicateEvaluator requires data-type, it is initialized lazily.
*/
PredicateEvaluator getPredicateEvaluator(FieldSpec.DataType dataType) {
if (_predicateEvaluator == null) {
_predicateEvaluator = PredicateEvaluatorProvider.getPredicateEvaluator(_predicate, null, dataType);
}
return _predicateEvaluator;
}
}
}
| pinot-core/src/main/java/org/apache/pinot/core/query/aggregation/function/DistinctCountThetaSketchAggregationFunction.java |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.core.query.aggregation.function;
import com.google.common.base.Preconditions;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.theta.Intersection;
import org.apache.datasketches.theta.SetOperation;
import org.apache.datasketches.theta.SetOperationBuilder;
import org.apache.datasketches.theta.Sketch;
import org.apache.datasketches.theta.Union;
import org.apache.pinot.common.function.AggregationFunctionType;
import org.apache.pinot.common.utils.DataSchema;
import org.apache.pinot.core.common.BlockValSet;
import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluator;
import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluatorProvider;
import org.apache.pinot.core.query.aggregation.AggregationResultHolder;
import org.apache.pinot.core.query.aggregation.ObjectAggregationResultHolder;
import org.apache.pinot.core.query.aggregation.ThetaSketchParams;
import org.apache.pinot.core.query.aggregation.groupby.GroupByResultHolder;
import org.apache.pinot.core.query.aggregation.groupby.ObjectGroupByResultHolder;
import org.apache.pinot.core.query.request.context.ExpressionContext;
import org.apache.pinot.core.query.request.context.FilterContext;
import org.apache.pinot.core.query.request.context.predicate.Predicate;
import org.apache.pinot.core.query.request.context.utils.QueryContextConverterUtils;
import org.apache.pinot.spi.data.FieldSpec;
import org.apache.pinot.sql.parsers.CalciteSqlParser;
/**
* Implementation of {@link AggregationFunction} to perform the distinct count aggregation using
* Theta Sketches.
* <p>TODO: For performance concern, use {@code List<Sketch>} as the intermediate result.
*/
@SuppressWarnings("Duplicates")
public class DistinctCountThetaSketchAggregationFunction implements AggregationFunction<Map<String, Sketch>, Long> {
private final ExpressionContext _thetaSketchColumn;
private final ThetaSketchParams _thetaSketchParams;
private final SetOperationBuilder _setOperationBuilder;
private final List<ExpressionContext> _inputExpressions;
private final FilterContext _postAggregationExpression;
private final Map<Predicate, PredicateInfo> _predicateInfoMap;
/**
* Constructor for the class.
* @param arguments List of parameters as arguments strings. At least three arguments are expected:
* <ul>
* <li> Required: First expression is interpreted as theta sketch column to aggregate on. </li>
* <li> Required: Second argument is the thetaSketchParams. </li>
* <li> Optional: Third to penultimate are predicates with LHS and RHS. </li>
* <li> Required: Last expression is the one that will be evaluated to compute final result. </li>
* </ul>
*/
public DistinctCountThetaSketchAggregationFunction(List<ExpressionContext> arguments)
throws SqlParseException {
int numArguments = arguments.size();
// NOTE: This function expects at least 3 arguments: theta-sketch column, parameters, post-aggregation expression.
    Preconditions.checkArgument(numArguments >= 3,
        "DistinctCountThetaSketch expects at least three arguments (theta-sketch column, parameters, post-aggregation expression), got: %s",
        numArguments);
// Initialize the theta-sketch column
_thetaSketchColumn = arguments.get(0);
Preconditions.checkArgument(_thetaSketchColumn.getType() == ExpressionContext.Type.IDENTIFIER,
"First argument of DistinctCountThetaSketch must be identifier (theta-sketch column)");
// Initialize the theta-sketch parameters
ExpressionContext paramsExpression = arguments.get(1);
Preconditions.checkArgument(paramsExpression.getType() == ExpressionContext.Type.LITERAL,
"Second argument of DistinctCountThetaSketch must be literal (parameters)");
_thetaSketchParams = ThetaSketchParams.fromString(paramsExpression.getLiteral());
// Initialize the theta-sketch set operation builder
_setOperationBuilder = getSetOperationBuilder();
// Initialize the input expressions
// NOTE: It is expected to cover the theta-sketch column and the lhs of the predicates.
_inputExpressions = new ArrayList<>();
_inputExpressions.add(_thetaSketchColumn);
// Initialize the post-aggregation expression
// NOTE: It is modeled as a filter
ExpressionContext postAggregationExpression = arguments.get(numArguments - 1);
    Preconditions.checkArgument(postAggregationExpression.getType() == ExpressionContext.Type.LITERAL,
"Last argument of DistinctCountThetaSketch must be literal (post-aggregation expression)");
_postAggregationExpression = QueryContextConverterUtils
.getFilter(CalciteSqlParser.compileToExpression(postAggregationExpression.getLiteral()));
// Initialize the predicate map
_predicateInfoMap = new HashMap<>();
if (numArguments > 3) {
// Predicates are explicitly specified
for (int i = 2; i < numArguments - 1; i++) {
ExpressionContext predicateExpression = arguments.get(i);
Preconditions.checkArgument(predicateExpression.getType() == ExpressionContext.Type.LITERAL,
"Third to second last argument of DistinctCountThetaSketch must be literal (predicate expression)");
Predicate predicate = getPredicate(predicateExpression.getLiteral());
_inputExpressions.add(predicate.getLhs());
_predicateInfoMap.put(predicate, new PredicateInfo(predicate));
}
} else {
// Auto-derive predicates from the post-aggregation expression
Stack<FilterContext> stack = new Stack<>();
stack.push(_postAggregationExpression);
while (!stack.isEmpty()) {
FilterContext filter = stack.pop();
if (filter.getType() == FilterContext.Type.PREDICATE) {
Predicate predicate = filter.getPredicate();
_inputExpressions.add(predicate.getLhs());
_predicateInfoMap.put(predicate, new PredicateInfo(predicate));
} else {
stack.addAll(filter.getChildren());
}
}
}
}
@Override
public AggregationFunctionType getType() {
return AggregationFunctionType.DISTINCTCOUNTTHETASKETCH;
}
@Override
public String getColumnName() {
return AggregationFunctionType.DISTINCTCOUNTTHETASKETCH.getName() + "_" + _thetaSketchColumn;
}
@Override
public String getResultColumnName() {
return AggregationFunctionType.DISTINCTCOUNTTHETASKETCH.getName().toLowerCase() + "(" + _thetaSketchColumn + ")";
}
@Override
public List<ExpressionContext> getInputExpressions() {
return _inputExpressions;
}
@Override
public void accept(AggregationFunctionVisitorBase visitor) {
visitor.visit(this);
}
@Override
public AggregationResultHolder createAggregationResultHolder() {
return new ObjectAggregationResultHolder();
}
@Override
public GroupByResultHolder createGroupByResultHolder(int initialCapacity, int maxCapacity) {
return new ObjectGroupByResultHolder(initialCapacity, maxCapacity);
}
@Override
public void aggregate(int length, AggregationResultHolder aggregationResultHolder,
Map<ExpressionContext, BlockValSet> blockValSetMap) {
Map<Predicate, Union> unionMap = getUnionMap(aggregationResultHolder);
Sketch[] sketches = deserializeSketches(blockValSetMap.get(_thetaSketchColumn).getBytesValuesSV(), length);
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
Predicate predicate = predicateInfo.getPredicate();
BlockValSet blockValSet = blockValSetMap.get(predicate.getLhs());
FieldSpec.DataType valueType = blockValSet.getValueType();
PredicateEvaluator predicateEvaluator = predicateInfo.getPredicateEvaluator(valueType);
Union union = unionMap.get(predicate);
switch (valueType) {
case INT:
int[] intValues = blockValSet.getIntValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(intValues[i])) {
union.update(sketches[i]);
}
}
break;
case LONG:
long[] longValues = blockValSet.getLongValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(longValues[i])) {
union.update(sketches[i]);
}
}
break;
case FLOAT:
float[] floatValues = blockValSet.getFloatValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(floatValues[i])) {
union.update(sketches[i]);
}
}
break;
case DOUBLE:
double[] doubleValues = blockValSet.getDoubleValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(doubleValues[i])) {
union.update(sketches[i]);
}
}
break;
case STRING:
String[] stringValues = blockValSet.getStringValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(stringValues[i])) {
union.update(sketches[i]);
}
}
break;
case BYTES:
byte[][] bytesValues = blockValSet.getBytesValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(bytesValues[i])) {
union.update(sketches[i]);
}
}
break;
default:
throw new IllegalStateException();
}
}
}
@Override
public void aggregateGroupBySV(int length, int[] groupKeyArray, GroupByResultHolder groupByResultHolder,
Map<ExpressionContext, BlockValSet> blockValSetMap) {
Sketch[] sketches = deserializeSketches(blockValSetMap.get(_thetaSketchColumn).getBytesValuesSV(), length);
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
Predicate predicate = predicateInfo.getPredicate();
BlockValSet blockValSet = blockValSetMap.get(predicate.getLhs());
FieldSpec.DataType valueType = blockValSet.getValueType();
PredicateEvaluator predicateEvaluator = predicateInfo.getPredicateEvaluator(valueType);
switch (valueType) {
case INT:
int[] intValues = blockValSet.getIntValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(intValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case LONG:
long[] longValues = blockValSet.getLongValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(longValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case FLOAT:
float[] floatValues = blockValSet.getFloatValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(floatValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case DOUBLE:
double[] doubleValues = blockValSet.getDoubleValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(doubleValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case STRING:
String[] stringValues = blockValSet.getStringValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(stringValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
}
break;
case BYTES:
byte[][] bytesValues = blockValSet.getBytesValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(bytesValues[i])) {
getUnionMap(groupByResultHolder, groupKeyArray[i]).get(predicate).update(sketches[i]);
}
          }
          break;
        default:
throw new IllegalStateException();
}
}
}
@Override
public void aggregateGroupByMV(int length, int[][] groupKeysArray, GroupByResultHolder groupByResultHolder,
Map<ExpressionContext, BlockValSet> blockValSetMap) {
Sketch[] sketches = deserializeSketches(blockValSetMap.get(_thetaSketchColumn).getBytesValuesSV(), length);
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
Predicate predicate = predicateInfo.getPredicate();
BlockValSet blockValSet = blockValSetMap.get(predicate.getLhs());
FieldSpec.DataType valueType = blockValSet.getValueType();
PredicateEvaluator predicateEvaluator = predicateInfo.getPredicateEvaluator(valueType);
switch (valueType) {
case INT:
int[] intValues = blockValSet.getIntValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(intValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case LONG:
long[] longValues = blockValSet.getLongValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(longValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case FLOAT:
float[] floatValues = blockValSet.getFloatValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(floatValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case DOUBLE:
double[] doubleValues = blockValSet.getDoubleValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(doubleValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case STRING:
String[] stringValues = blockValSet.getStringValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(stringValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
case BYTES:
byte[][] bytesValues = blockValSet.getBytesValuesSV();
for (int i = 0; i < length; i++) {
if (predicateEvaluator.applySV(bytesValues[i])) {
for (int groupKey : groupKeysArray[i]) {
getUnionMap(groupByResultHolder, groupKey).get(predicate).update(sketches[i]);
}
}
}
break;
default:
throw new IllegalStateException();
}
}
}
@Override
public Map<String, Sketch> extractAggregationResult(AggregationResultHolder aggregationResultHolder) {
Map<Predicate, Union> unionMap = getUnionMap(aggregationResultHolder);
Map<String, Sketch> result = new HashMap<>();
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
result.put(predicateInfo.getStringPredicate(), unionMap.get(predicateInfo.getPredicate()).getResult());
}
return result;
}
@Override
public Map<String, Sketch> extractGroupByResult(GroupByResultHolder groupByResultHolder, int groupKey) {
Map<Predicate, Union> unionMap = getUnionMap(groupByResultHolder, groupKey);
Map<String, Sketch> result = new HashMap<>();
for (PredicateInfo predicateInfo : _predicateInfoMap.values()) {
result.put(predicateInfo.getStringPredicate(), unionMap.get(predicateInfo.getPredicate()).getResult());
}
return result;
}
@Override
public Map<String, Sketch> merge(Map<String, Sketch> intermediateResult1, Map<String, Sketch> intermediateResult2) {
if (intermediateResult1 == null) {
return intermediateResult2;
} else if (intermediateResult2 == null) {
return intermediateResult1;
}
// NOTE: Here we parse the map keys to Predicate to handle the non-standard predicate string returned from server
// side for backward-compatibility.
// TODO: Remove the extra parsing after releasing 0.5.0
Map<Predicate, Union> unionMap = getDefaultUnionMap();
for (Map.Entry<String, Sketch> entry : intermediateResult1.entrySet()) {
Predicate predicate = getPredicate(entry.getKey());
unionMap.get(predicate).update(entry.getValue());
}
for (Map.Entry<String, Sketch> entry : intermediateResult2.entrySet()) {
Predicate predicate = getPredicate(entry.getKey());
unionMap.get(predicate).update(entry.getValue());
}
Map<String, Sketch> mergedResult = new HashMap<>();
for (Map.Entry<Predicate, Union> entry : unionMap.entrySet()) {
mergedResult.put(entry.getKey().toString(), entry.getValue().getResult());
}
return mergedResult;
}
@Override
public boolean isIntermediateResultComparable() {
return false;
}
@Override
public DataSchema.ColumnDataType getIntermediateResultColumnType() {
return DataSchema.ColumnDataType.OBJECT;
}
@Override
public DataSchema.ColumnDataType getFinalResultColumnType() {
return DataSchema.ColumnDataType.LONG;
}
@Override
public Long extractFinalResult(Map<String, Sketch> intermediateResult) {
Sketch finalSketch = extractFinalSketch(intermediateResult);
return Math.round(finalSketch.getEstimate());
}
private Predicate getPredicate(String predicateString) {
FilterContext filter;
try {
filter = QueryContextConverterUtils.getFilter(CalciteSqlParser.compileToExpression(predicateString));
} catch (SqlParseException e) {
throw new IllegalArgumentException("Invalid predicate string: " + predicateString);
}
// TODO: Add support for complex predicates with AND/OR.
Preconditions.checkArgument(filter.getType() == FilterContext.Type.PREDICATE, "Invalid predicate string: %s",
predicateString);
return filter.getPredicate();
}
private Map<Predicate, Union> getUnionMap(AggregationResultHolder aggregationResultHolder) {
Map<Predicate, Union> unionMap = aggregationResultHolder.getResult();
if (unionMap == null) {
unionMap = getDefaultUnionMap();
aggregationResultHolder.setValue(unionMap);
}
return unionMap;
}
private Map<Predicate, Union> getUnionMap(GroupByResultHolder groupByResultHolder, int groupKey) {
Map<Predicate, Union> unionMap = groupByResultHolder.getResult(groupKey);
if (unionMap == null) {
unionMap = getDefaultUnionMap();
groupByResultHolder.setValueForKey(groupKey, unionMap);
}
return unionMap;
}
private Map<Predicate, Union> getDefaultUnionMap() {
Map<Predicate, Union> unionMap = new HashMap<>();
for (Predicate predicate : _predicateInfoMap.keySet()) {
unionMap.put(predicate, _setOperationBuilder.buildUnion());
}
return unionMap;
}
private Sketch[] deserializeSketches(byte[][] serializedSketches, int length) {
Sketch[] sketches = new Sketch[length];
for (int i = 0; i < length; i++) {
sketches[i] = Sketch.wrap(Memory.wrap(serializedSketches[i]));
}
return sketches;
}
/**
* Evaluates the theta-sketch post-aggregation expression, which is composed by performing AND/OR on top of the
* pre-defined predicates. These predicates are evaluated during the aggregation phase, and the cached results are
* passed to this method to be used when evaluating the expression.
*
* @param postAggregationExpression Post-aggregation expression to evaluate (modeled as a filter)
* @param sketchMap Precomputed sketches for predicates that are part of the expression.
* @return Overall evaluated sketch for the expression.
*/
private Sketch evalPostAggregationExpression(FilterContext postAggregationExpression,
Map<Predicate, Sketch> sketchMap) {
switch (postAggregationExpression.getType()) {
case AND:
Intersection intersection = _setOperationBuilder.buildIntersection();
for (FilterContext child : postAggregationExpression.getChildren()) {
intersection.update(evalPostAggregationExpression(child, sketchMap));
}
return intersection.getResult();
case OR:
Union union = _setOperationBuilder.buildUnion();
for (FilterContext child : postAggregationExpression.getChildren()) {
union.update(evalPostAggregationExpression(child, sketchMap));
}
return union.getResult();
case PREDICATE:
return sketchMap.get(postAggregationExpression.getPredicate());
default:
throw new IllegalStateException();
}
}
/**
* Extracts the final sketch from the intermediate result by applying the post-aggregation expression on it.
*
* @param intermediateResult Intermediate result
* @return Final Sketch obtained by computing the post-aggregation expression on intermediate result
*/
protected Sketch extractFinalSketch(Map<String, Sketch> intermediateResult) {
// NOTE: Here we parse the map keys to Predicate to handle the non-standard predicate string returned from server
// side for backward-compatibility.
// TODO: Remove the extra parsing after releasing 0.5.0
Map<Predicate, Sketch> sketchMap = new HashMap<>();
for (Map.Entry<String, Sketch> entry : intermediateResult.entrySet()) {
Predicate predicate = getPredicate(entry.getKey());
sketchMap.put(predicate, entry.getValue());
}
return evalPostAggregationExpression(_postAggregationExpression, sketchMap);
}
/**
* Returns the theta-sketch SetOperation builder properly configured.
* Currently, only setting of nominalEntries is supported.
* @return SetOperationBuilder
*/
private SetOperationBuilder getSetOperationBuilder() {
return _thetaSketchParams == null ? SetOperation.builder()
: SetOperation.builder().setNominalEntries(_thetaSketchParams.getNominalEntries());
}
/**
* Helper class to store predicate related information:
* <ul>
* <li>Predicate</li>
* <li>String representation of the predicate</li>
* <li>Predicate evaluator</li>
* </ul>
*/
private static class PredicateInfo {
final Predicate _predicate;
final String _stringPredicate;
PredicateEvaluator _predicateEvaluator;
PredicateInfo(Predicate predicate) {
_predicate = predicate;
_stringPredicate = predicate.toString();
_predicateEvaluator = null; // Initialized lazily
}
Predicate getPredicate() {
return _predicate;
}
String getStringPredicate() {
return _stringPredicate;
}
/**
* Since PredicateEvaluator requires data-type, it is initialized lazily.
*/
PredicateEvaluator getPredicateEvaluator(FieldSpec.DataType dataType) {
if (_predicateEvaluator == null) {
_predicateEvaluator = PredicateEvaluatorProvider.getPredicateEvaluator(_predicate, null, dataType);
}
return _predicateEvaluator;
}
}
}
| Avoid redundant merge of empty sketches. (#5783)
For segments where no rows are selected, the intermediate and final merge steps
still create empty Union objects and merge them. This can cause a huge latency degradation
when there is a large number of segments and a large value of nominal entries.
This PR avoids creating and merging empty sketches. With this change, we see a latency
improvement from 8s to < 500ms for 167 segments with nominal entries of `1048576`. | pinot-core/src/main/java/org/apache/pinot/core/query/aggregation/function/DistinctCountThetaSketchAggregationFunction.java | Avoid redundant merge of empty sketches. (#5783)
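A minimal editor's sketch (not part of the patch) of the guard the message above describes, mirroring the checks added to extractAggregationResult and merge; the helper name and signature are hypothetical:

    // Returns the per-predicate sketches, or an empty map when the segment selected no rows,
    // so that downstream merges never build Union objects over empty inputs.
    private static Map<String, Sketch> extractOrEmpty(Map<Predicate, Union> unionMap,
        Collection<PredicateInfo> predicateInfos) {
      if (unionMap == null || unionMap.isEmpty()) {
        return Collections.emptyMap();
      }
      Map<String, Sketch> result = new HashMap<>();
      for (PredicateInfo info : predicateInfos) {
        result.put(info.getStringPredicate(), unionMap.get(info.getPredicate()).getResult());
      }
      return result;
    }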
|
Java | apache-2.0 | 0c273bb5e4c5d1daa64e31b0e20ff7989c9276e9 | 0 | apache/geronimo-devtools
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.st.v30.core;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import javax.enterprise.deploy.spi.Target;
import javax.enterprise.deploy.spi.exceptions.DeploymentManagerCreationException;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;
import javax.naming.directory.NoSuchAttributeException;
import org.apache.geronimo.deployment.plugin.jmx.ExtendedDeploymentManager;
import org.apache.geronimo.deployment.plugin.jmx.JMXDeploymentManager;
import org.apache.geronimo.gbean.AbstractName;
import org.apache.geronimo.gbean.AbstractNameQuery;
import org.apache.geronimo.gbean.GBeanData;
import org.apache.geronimo.kernel.GBeanNotFoundException;
import org.apache.geronimo.kernel.InternalKernelException;
import org.apache.geronimo.kernel.Kernel;
import org.apache.geronimo.kernel.config.Configuration;
import org.apache.geronimo.kernel.config.InvalidConfigException;
import org.apache.geronimo.kernel.config.PersistentConfigurationList;
import org.apache.geronimo.kernel.repository.Artifact;
import org.apache.geronimo.st.core.GeronimoJMXConnectorFactory;
import org.apache.geronimo.st.core.GeronimoJMXConnectorFactory.JMXConnectorInfo;
import org.apache.geronimo.st.v30.core.UpdateServerStateTask;
import org.apache.geronimo.st.v30.core.commands.DeploymentCommandFactory;
import org.apache.geronimo.st.v30.core.internal.DependencyHelper;
import org.apache.geronimo.st.v30.core.internal.Messages;
import org.apache.geronimo.st.v30.core.internal.Trace;
import org.apache.geronimo.st.v30.core.operations.ISharedLibEntryCreationDataModelProperties;
import org.apache.geronimo.st.v30.core.operations.SharedLibEntryCreationOperation;
import org.apache.geronimo.st.v30.core.operations.SharedLibEntryDataModelProvider;
import org.apache.geronimo.st.v30.core.osgi.AriesHelper;
import org.apache.geronimo.st.v30.core.osgi.OSGiModuleHandler;
import org.apache.geronimo.system.jmx.KernelDelegate;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.MultiStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.debug.core.DebugEvent;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.IDebugEventSetListener;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.debug.core.ILaunchManager;
import org.eclipse.debug.core.model.IProcess;
import org.eclipse.debug.core.sourcelookup.AbstractSourceLookupDirector;
import org.eclipse.debug.core.sourcelookup.ISourceContainer;
import org.eclipse.debug.core.sourcelookup.containers.DefaultSourceContainer;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.internal.launching.RuntimeClasspathEntry;
import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
import org.eclipse.jdt.launching.IVMInstall;
import org.eclipse.jdt.launching.JavaRuntime;
import org.eclipse.osgi.util.NLS;
import org.eclipse.wst.common.frameworks.datamodel.DataModelFactory;
import org.eclipse.wst.common.frameworks.datamodel.IDataModel;
import org.eclipse.wst.common.frameworks.datamodel.IDataModelOperation;
import org.eclipse.wst.server.core.IModule;
import org.eclipse.wst.server.core.IServer;
import org.eclipse.wst.server.core.IServerListener;
import org.eclipse.wst.server.core.ServerEvent;
import org.eclipse.wst.server.core.ServerPort;
import org.eclipse.wst.server.core.internal.IModulePublishHelper;
import org.eclipse.wst.server.core.internal.ProgressUtil;
import org.eclipse.wst.server.core.model.IModuleFile;
import org.eclipse.wst.server.core.model.IModuleFolder;
import org.eclipse.wst.server.core.model.IModuleResource;
import org.eclipse.wst.server.core.model.IModuleResourceDelta;
import org.eclipse.wst.server.core.model.ServerBehaviourDelegate;
import org.eclipse.wst.server.core.util.PublishHelper;
import org.eclipse.wst.server.core.util.SocketUtil;
/**
* @version $Rev$ $Date$
*/
public class GeronimoServerBehaviourDelegate extends ServerBehaviourDelegate implements IGeronimoServerBehavior, IModulePublishHelper {
public static final int TIMER_TASK_INTERVAL = 20;
public static final int TIMER_TASK_DELAY = 20;
private Kernel kernel = null;
protected Timer stateTimer = null;
protected SynchronizeProjectOnServerTask synchronizerTask = null;
protected PingThread pingThread;
protected transient IDebugEventSetListener processListener;
public static final String ERROR_SETUP_LAUNCH_CONFIGURATION = "errorInSetupLaunchConfiguration";
private PublishStateListener publishStateListener;
private Set<IProject> knownSourceProjects = null;
private DefaultModuleHandler defaultModuleHandler;
private OSGiModuleHandler osgiModuleHandler;
protected ClassLoader getContextClassLoader() {
return Kernel.class.getClassLoader();
}
/*
* (non-Javadoc)
*
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#setupLaunchConfiguration(org.eclipse.debug.core.ILaunchConfigurationWorkingCopy,
* org.eclipse.core.runtime.IProgressMonitor)
*/
public void setupLaunchConfiguration(ILaunchConfigurationWorkingCopy wc, IProgressMonitor monitor) throws CoreException {
if (isRemote())// No launch for remote servers.
return;
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_MAIN_TYPE_NAME, getRuntimeClass());
GeronimoRuntimeDelegate runtime = getRuntimeDelegate();
IVMInstall vmInstall = runtime.getVMInstall();
if (vmInstall != null) {
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_JRE_CONTAINER_PATH,
JavaRuntime.newJREContainerPath(vmInstall).toPortableString());
}
String existingProgArgs = null;
wc.setAttribute(ERROR_SETUP_LAUNCH_CONFIGURATION, (String)null);
try{
setupLaunchClasspath(wc, vmInstall);
existingProgArgs = wc.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, (String) null);
}catch (CoreException e){
// Throwing a CoreException at this time will not accomplish anything useful as WTP will
// essentially ignore it. Instead set a flag in the configuration that can
// subsequently be checked when an attempt is made to launch the server in
// GeronimoLaunchConfigurationDelegate.launch(). At that point a CoreException will be
// thrown that WTP will handle properly and will display an error dialog which is
// exactly what we want the GEP user to see.
wc.setAttribute(ERROR_SETUP_LAUNCH_CONFIGURATION, e.getMessage());
}
GeronimoServerDelegate gsd = getServerDelegate();
String programArgs = gsd.getProgramArgs();
Trace.tracePoint("GeronimoServerBehaviourDelegate.v30", Activator.traceCore, "setupLaunchConfiguration serverProgramArgs", programArgs);
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, programArgs);
/*
programArgs = getServerProgramArgs(existingProgArgs, getServerDelegate());
Trace.tracePoint("GeronimoServerBehaviourDelegate.v30", "setupLaunchConfiguration serverProgramArgs",
programArgs);
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, programArgs);
*/
String vmArgs = gsd.getVMArgs();
Trace.tracePoint("GeronimoServerBehaviourDelegate.v30", Activator.traceCore, "setupLaunchConfiguration serverVMArgs", vmArgs);
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_VM_ARGUMENTS, vmArgs);
}
/**
* @param launch
* @param launchMode
* @param monitor
* @throws CoreException
*/
synchronized protected void setupLaunch(ILaunch launch, String launchMode, IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.setupLaunch", launch, launchMode, monitor);
if (!SocketUtil.isLocalhost(getServer().getHost()))
return;
ServerPort[] ports = getServer().getServerPorts(null);
for (int i = 0; i < ports.length; i++) {
ServerPort sp = ports[i];
if (SocketUtil.isPortInUse(ports[i].getPort(), 5))
throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, 0, Messages.bind(Messages.errorPortInUse, Integer.toString(sp.getPort()), sp.getName()), null));
}
stopUpdateServerStateTask();
setServerState(IServer.STATE_STARTING);
setMode(launchMode);
IServerListener listener = new IServerListener() {
public void serverChanged(ServerEvent event) {
int eventKind = event.getKind();
if ((eventKind & ServerEvent.STATE_CHANGE) != 0) {
int state = event.getServer().getServerState();
if (state == IServer.STATE_STARTED
|| state == IServer.STATE_STOPPED) {
GeronimoServerBehaviourDelegate.this.getServer().removeServerListener(this);
startUpdateServerStateTask();
}
}
}
};
getServer().addServerListener(listener);
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.setupLaunch");
}
/*
* (non-Javadoc)
*
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#stop(boolean)
*/
public synchronized void stop(final boolean force) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.stop", force);
IServer server = getServer();
if (server.getServerState() != IServer.STATE_STOPPED) {
setServerState(IServer.STATE_STOPPING);
}
// stop threads
stopPingThread();
stopSynchronizeProjectOnServerTask();
// request shutdown
stopKernel();
// wait for shutdown
if (!waitForStopped(60 * 1000) || force) {
ILaunch launch = server.getLaunch();
if (launch != null) {
Trace.trace(Trace.INFO, "Killing the geronimo server process", Activator.traceCore); //$NON-NLS-1$
try {
launch.terminate();
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error killing the geronimo server process", e, Activator.logCore); //$NON-NLS-1$
}
}
}
GeronimoConnectionFactory.getInstance().destroy(server);
stopImpl();
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.stop");
}
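/*
 * Polls the kernel until it reports that it is no longer running or until the timeout expires.
 * Returns true if the server stopped within the timeout, false otherwise.
 */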
private boolean waitForStopped(long timeout) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.waitForStopped", timeout);
long started = System.currentTimeMillis();
boolean stopped = false;
try {
while (System.currentTimeMillis() - started < timeout) {
if (isKernelAlive()) {
Thread.sleep(500);
} else {
stopped = true;
break;
}
}
} catch (InterruptedException e) {
// ignore
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.waitForStopped", stopped);
return stopped;
}
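/*
 * Records the publish result for a module: on success the module publish state and status are
 * cleared; on failure the status is added to the multi-status and shown as the module status.
 */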
private void setStatus(IModule[] module, IStatus status, MultiStatus multiStatus) {
if (status.isOK()) {
setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
setModuleStatus(module, null);
} else {
multiStatus.add(status);
setModuleStatus(module, status);
setModulePublishState(module, IServer.PUBLISH_STATE_UNKNOWN);
}
}
void setKnownSourceProjects(Set<IProject> knownSourceProjects) {
this.knownSourceProjects = knownSourceProjects;
}
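/*
 * Returns false if any module in the list belongs to a project that is not part of the
 * current source lookup list; returns true if the list has not been computed yet.
 */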
boolean hasKnownSourceProject(List<IModule[]> moduleList) {
if (knownSourceProjects != null) {
for (IModule[] modules : moduleList) {
for (IModule module : modules) {
IProject project = module.getProject();
if (project != null && !knownSourceProjects.contains(project)) {
Trace.trace(Trace.INFO, "Project " + project.getName() + " is not source lookup list.", Activator.traceCore); //$NON-NLS-1$
return false;
}
}
}
}
return true;
}
void resetSourceLookupList() {
Trace.trace(Trace.INFO, "Resetting source lookup list.", Activator.traceCore); //$NON-NLS-1$
// reset DefaultSourceContainer - that will force Eclipse to re-compute the source paths
AbstractSourceLookupDirector locator = (AbstractSourceLookupDirector) getServer().getLaunch().getSourceLocator();
ISourceContainer[] oldContainers = locator.getSourceContainers();
ISourceContainer[] newContainers = new ISourceContainer[oldContainers.length];
System.arraycopy(oldContainers, 0, newContainers, 0, oldContainers.length);
DefaultSourceContainer newDefaultContainer = new DefaultSourceContainer();
for (int i = 0; i < newContainers.length; i++) {
if (newDefaultContainer.getType().equals(newContainers[i].getType())) {
newContainers[i] = newDefaultContainer;
break;
}
}
locator.setSourceContainers(newContainers);
// reset knownSourceProjects as they will be set once Eclipse re-computes the source paths
knownSourceProjects = null;
}
/*
* Override this method to be able to process in-place shared lib entries and restart the shared lib configuration for all projects prior
* to publishing each IModule.
*
 * This overridden method also fixes WTP Bugzilla 123676 to prevent duplicate redeploys if both parent and child modules have deltas.
*
* (non-Javadoc)
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#publishModules(int, java.util.List, java.util.List, org.eclipse.core.runtime.MultiStatus, org.eclipse.core.runtime.IProgressMonitor)
*/
protected void publishModules(int kind, List modules, List deltaKind, MultiStatus multi, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishModules", publishKindToString(kind), Arrays.asList(modules), Arrays.asList(deltaKind), multi, monitor);
//
// WTP publishes modules in reverse alphabetical order which does not account for possible
// dependencies between modules. If necessary reorder the publish order of the modules
// based on any discovered dependencies.
//
if (modules != null && modules.size() > 0) {
List list = getOrderedModules(this.getServer(),modules, deltaKind);
modules = (List) list.get(0);
deltaKind = (List) list.get(1);
// trace output
if (Activator.getDefault().isDebugging()) {
for (int i = 0; i < modules.size(); i++) {
IModule[] module = (IModule[]) modules.get(i);
Trace.trace(Trace.INFO, i + " " + Arrays.asList(module).toString() + " "
+ deltaKindToString(((Integer) deltaKind.get(i)).intValue()), Activator.traceCore);
IModuleResourceDelta[] deltas = getPublishedResourceDelta(module);
traceModuleResourceDelta(deltas, " ");
}
}
}
IStatus status = Status.OK_STATUS;
if (modules != null && modules.size() > 0 && getGeronimoServer().isInPlaceSharedLib()) {
List<IModule> rootModules = new ArrayList<IModule>();
for(int i = 0; i < modules.size(); i++) {
IModule[] module = (IModule[]) modules.get(i);
if(!rootModules.contains(module[0])) {
rootModules.add(module[0]);
}
}
IModule[] toProcess = (IModule[])rootModules.toArray(new IModule[rootModules.size()]);
status = updateSharedLib(toProcess, ProgressUtil.getSubMonitorFor(monitor, 1000));
}
/*
* Build a map of root modules that need to be published. This is to ensure that
* we avoid redeploys and it guarantees that publishModule() is called once per
* deployed application.
*/
Map<String, ModuleDeltaList> publishMap = new LinkedHashMap<String, ModuleDeltaList>();
for (int i = 0; i < modules.size(); i++) {
IModule[] module = (IModule[]) modules.get(i);
Integer moduleDeltaKind = (Integer) deltaKind.get(i);
IModule rootModule = module[0];
ModuleDeltaList list = publishMap.get(rootModule.getId());
if (list == null) {
list = new ModuleDeltaList(rootModule);
publishMap.put(rootModule.getId(), list);
}
if (module.length == 1) {
list.setRootModuleDelta(moduleDeltaKind.intValue());
} else {
list.addChildModule(module, moduleDeltaKind.intValue());
}
}
// Reset source code lookup list - see GERONIMODEVTOOLS-763 for details.
if (ILaunchManager.DEBUG_MODE.equals(getServer().getMode()) && !hasKnownSourceProject(modules)) {
resetSourceLookupList();
}
if(status.isOK()) {
if (modules == null)
return;
int size = modules.size();
if (size == 0)
return;
if (monitor.isCanceled())
return;
// phase 1: see if the modified contents can be copied/replaced
if (getServerDelegate().isNoRedeploy() && !isRemote()) {
Iterator<ModuleDeltaList> iterator = publishMap.values().iterator();
while (iterator.hasNext()) {
ModuleDeltaList moduleList = iterator.next();
IModule[] rootModule = moduleList.getRootModule();
if (GeronimoUtils.isEBAModule(rootModule[0]) || GeronimoUtils.isEarModule(rootModule[0])) {
if (moduleList.hasChangedChildModulesOnly(true)) {
boolean replacementPossible = true;
Map<IModule[], IStatus> statusMap = new HashMap<IModule[], IStatus>();
for (ModuleDelta moduleDelta : moduleList.getChildModules()) {
IModule bundleModule = moduleDelta.module[1];
if (moduleDelta.delta == CHANGED && (GeronimoUtils.isWebModule(bundleModule) || GeronimoUtils.isBundleModule(bundleModule))) {
// try to do replacement
status = tryFileReplace(moduleDelta.module);
if (status == null) {
// replacement was not possible
replacementPossible = false;
break;
} else {
statusMap.put(moduleDelta.module, status);
}
} else {
statusMap.put(moduleDelta.module, Status.OK_STATUS);
}
}
if (replacementPossible) {
// replacement was possible for all changed child modules - remove it from publish list
iterator.remove();
statusMap.put(rootModule, Status.OK_STATUS);
for (Map.Entry<IModule[], IStatus> entry : statusMap.entrySet()) {
setStatus(entry.getKey(), entry.getValue(), multi);
}
} else {
// replacement was not possible for at least one child module - redeploy the module
}
}
} else if (GeronimoUtils.isWebModule(rootModule[0]) || GeronimoUtils.isBundleModule(rootModule[0])) {
if (moduleList.getEffectiveRootDelta() == CHANGED) {
// contents changed - try to do replacement
status = tryFileReplace(rootModule);
if (status != null) {
// replacement was possible - remove it from publish list
iterator.remove();
setStatus(rootModule, status, multi);
} else {
// replacement was not possible - redeploy the module
}
}
}
}
}
// phase 2: re-deploy the modules
boolean refreshOSGiBundle = getServerDelegate().isRefreshOSGiBundle();
for (ModuleDeltaList moduleList : publishMap.values()) {
IModule[] rootModule = moduleList.getRootModule();
AbstractName ebaName = null;
if (refreshOSGiBundle &&
GeronimoUtils.isEBAModule(rootModule[0]) &&
moduleList.hasChangedChildModulesOnly(false) &&
(ebaName = getApplicationGBeanName(rootModule[0])) != null) {
List<IModule[]> changedModules = new ArrayList<IModule[]>();
List<IModule[]> unChangedModules = new ArrayList<IModule[]>();
for (ModuleDelta moduleDelta : moduleList.getChildModules()) {
if (moduleDelta.delta == CHANGED) {
changedModules.add(moduleDelta.module);
} else {
unChangedModules.add(moduleDelta.module);
}
}
status = refreshBundles(rootModule[0], ebaName, changedModules, ProgressUtil.getSubMonitorFor(monitor, 3000));
if (status != null && !status.isOK()) {
multi.add(status);
}
unChangedModules.add(rootModule);
for (IModule[] module : unChangedModules) {
setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
setModuleStatus(module, null);
}
} else {
status = publishModule(kind, rootModule, moduleList.getEffectiveRootDelta(), ProgressUtil.getSubMonitorFor(monitor, 3000));
if (status != null && !status.isOK()) {
multi.add(status);
} else {
for (ModuleDelta moduleDelta : moduleList.getChildModules()) {
setModulePublishState(moduleDelta.module, IServer.PUBLISH_STATE_NONE);
setModuleStatus(moduleDelta.module, null);
}
}
}
}
} else {
multi.add(status);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishModules");
}
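/*
 * Looks up the AbstractName of the application GBean for the given EBA module through the
 * deployment manager; returns null if the lookup fails.
 */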
private AbstractName getApplicationGBeanName(IModule ebaModule) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.getApplicationGBeanName", ebaModule);
IServer server = getServer();
AbstractName ebaName = null;
try {
String configId = DeploymentUtils.getConfigId(server, ebaModule);
ExtendedDeploymentManager dm = (ExtendedDeploymentManager) DeploymentCommandFactory.getDeploymentManager(server);
ebaName = dm.getApplicationGBeanName(Artifact.create(configId));
} catch (CoreException e) {
Trace.trace(Trace.WARNING, "Error getting gbean name", e, Activator.traceCore);
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getApplicationGBeanName", ebaName);
return ebaName;
}
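/*
 * Refreshes the changed bundles of a deployed EBA individually instead of redeploying the
 * whole application: maps each content bundle's symbolic name to its bundle id on the server
 * and then refreshes each changed bundle module.
 */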
private IStatus refreshBundles(IModule ebaModule, AbstractName ebaName, List<IModule[]> bundleModules, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundles", ebaModule, ebaName, bundleModules, monitor);
if (monitor.isCanceled()) {
return Status.CANCEL_STATUS;
}
MultiStatus multiStatus = new MultiStatus(Activator.PLUGIN_ID, 0, "", null);
try {
ExtendedDeploymentManager dm = (ExtendedDeploymentManager) DeploymentCommandFactory.getDeploymentManager(getServer());
long[] bundleIds = dm.getEBAContentBundleIds(ebaName);
Map<String, Long> bundleMap = new HashMap<String, Long>();
for (long bundleId : bundleIds) {
String symbolicName = dm.getEBAContentBundleSymbolicName(ebaName, bundleId);
if (symbolicName != null) {
bundleMap.put(symbolicName, bundleId);
}
}
for (IModule[] bundleModule : bundleModules) {
if (monitor.isCanceled()) {
return Status.CANCEL_STATUS;
}
IStatus status = refreshBundle(ebaModule, bundleModule[1], ebaName, bundleMap);
setStatus(bundleModule, status, multiStatus);
}
} catch (Exception e) {
multiStatus.add(new Status(IStatus.ERROR, Activator.PLUGIN_ID, Messages.REFRESH_FAIL, e));
}
IStatus status;
if (multiStatus.isOK()) {
status = Status.OK_STATUS;
} else {
status = multiStatus;
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundles", status);
return status;
}
private IStatus refreshBundle(IModule ebaModule, IModule bundleModule, AbstractName ebaName, Map<String, Long> bundleMap) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundle", ebaModule, bundleModule, ebaName, bundleMap);
try {
String symbolicName = AriesHelper.getSymbolicName(bundleModule);
Long bundleId = bundleMap.get(symbolicName);
if (bundleId == null) {
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, Messages.bind(Messages.REFRESH_NO_BUNDLE_FAIL,
new String[] {bundleModule.getProject().getName(), ebaModule.getProject().getName()}));
}
ExtendedDeploymentManager dm = (ExtendedDeploymentManager) DeploymentCommandFactory.getDeploymentManager(getServer());
/*
* Try class hot swap first and if it fails fallback to regular bundle update.
*/
if (!refreshBundleClasses(dm, ebaModule, bundleModule, ebaName, bundleId)) {
File file = DeploymentUtils.getTargetFile(getServer(), bundleModule);
dm.updateEBAContent(ebaName, bundleId, file);
}
} catch (Exception e) {
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, Messages.REFRESH_FAIL, e);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundle");
return Status.OK_STATUS;
}
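/*
 * Attempts to hot swap only the modified classes of a bundle. Returns false if class
 * redefinition is not supported, if non-class resources were modified, or if the hot swap
 * fails, in which case the caller falls back to a regular bundle update.
 */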
private boolean refreshBundleClasses(ExtendedDeploymentManager dm, IModule ebaModule, IModule bundleModule, AbstractName ebaName, long bundleId) throws Exception {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", ebaModule, bundleModule, ebaName, bundleId);
// check if class hot swap is supported
if (!dm.isRedefineClassesSupported()) {
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Class redefinition is not supported");
return false;
}
// ensure only classes have changed
IModuleResourceDelta[] delta = getPublishedResourceDelta(new IModule[] { ebaModule, bundleModule });
IModuleResource[] classResources = DeploymentUtils.getChangedClassResources(delta);
if (classResources == null) {
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Non-class resource modifications found");
return false;
}
// create temp. zip with the changes
File changeSetFile = DeploymentUtils.createChangeSetFile(classResources);
if (changeSetFile == null) {
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Error creating file with resource modifications");
return false;
}
// get document base for the module if it is expanded
String documentBase = getServerDelegate().isNoRedeploy() ? getWebModuleDocumentBase(bundleModule) : null;
// see if the classes can be hot swapped - update archive if module is not expanded
if (!dm.hotSwapEBAContent(ebaName, bundleId, changeSetFile, documentBase == null)) {
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Bundle class hot swap cannot be preformed");
changeSetFile.delete();
return false;
} else {
changeSetFile.delete();
}
if (documentBase != null) {
PublishHelper publishHelper = new PublishHelper(getTempDirectory().toFile());
IStatus[] statusArray = publishHelper.publishFull(classResources, new Path(documentBase), null);
if (statusArray != null) {
// XXX: in case of an error should we return false to force full re-deploy?
for (IStatus status : statusArray) {
if (!status.isOK()) {
Trace.trace(Trace.WARNING, "Error publishing changes: " + status.getMessage(), status.getException(), Activator.traceCore);
}
}
}
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Bundle class hot swap was succesfully preformed", documentBase);
return true;
}
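/**
 * Pairs a module path (root module plus optional child module) with its publish delta kind.
 */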
private static class ModuleDelta {
private final IModule[] module;
private int delta = NO_CHANGE;
public ModuleDelta(IModule[] module, int delta) {
this.module = module;
this.delta = delta;
}
}
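/**
 * Groups the root module of a deployed application with the publish deltas of its child
 * modules so that the application is published at most once per publish operation.
 */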
private static class ModuleDeltaList {
private ModuleDelta root;
private List<ModuleDelta> children;
public ModuleDeltaList(IModule rootModule) {
this.root = new ModuleDelta(new IModule [] {rootModule}, NO_CHANGE);
this.children = new ArrayList<ModuleDelta>();
}
public IModule[] getRootModule() {
return root.module;
}
public int getEffectiveRootDelta() {
if (root.delta == NO_CHANGE) {
for (ModuleDelta child : children) {
if (child.delta == ADDED || child.delta == REMOVED || child.delta == CHANGED) {
return CHANGED;
}
}
}
return root.delta;
}
public void setRootModuleDelta(int moduleDelta) {
root.delta = moduleDelta;
}
public void addChildModule(IModule[] module, int moduleDelta) {
children.add(new ModuleDelta(module, moduleDelta));
}
public List<ModuleDelta> getChildModules() {
return children;
}
public boolean hasChangedChildModulesOnly(boolean allChangedAllowed) {
int changed = getChangedChildModulesOnly();
if (changed > 0) {
if (allChangedAllowed) {
return true;
} else {
if (children.size() == 1) {
// special case: always return true if module only has one child module
return true;
} else {
return (changed < children.size());
}
}
}
return false;
}
/*
* Returns number of "changed" child modules.
* Returns -1 if a single "added" or "removed" child module is found or a root module is modified.
*/
public int getChangedChildModulesOnly() {
if (root.delta == NO_CHANGE) {
int changed = 0;
for (ModuleDelta child : children) {
if (child.delta == ADDED || child.delta == REMOVED) {
return -1;
} else if (child.delta == CHANGED) {
changed++;
}
}
return changed;
}
return -1;
}
}
/*
 * This method is used to invoke the version-specific DependencyHelper.
*/
protected List getOrderedModules(IServer server, List modules, List deltaKind) {
DependencyHelper dh = new DependencyHelper();
List list = dh.reorderModules(this.getServer(),modules, deltaKind);
return list;
}
/*
* (non-Javadoc)
*
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#publishModule(int,
* int, org.eclipse.wst.server.core.IModule[],
* org.eclipse.core.runtime.IProgressMonitor)
*/
public void publishModule(int kind, int deltaKind, IModule[] module, IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishModule", publishKindToString(kind), deltaKindToString(deltaKind), Arrays.asList(module), monitor);
try {
// NO_CHANGE is needed if the app is associated but not started and there is no delta
if (deltaKind == NO_CHANGE && module.length == 1) {
invokeCommand(deltaKind, module[0], monitor);
}
else if (deltaKind == CHANGED || deltaKind == ADDED || deltaKind == REMOVED) {
invokeCommand(deltaKind, module[0], monitor);
}
setModuleStatus(module, null);
setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
}
catch (CoreException e) {
//
// Set the parent module publish state to UNKNOWN so that WTP will display "Republish" instead
// "Synchronized" for the server state, and set the module status to an error message
// for the GEP end-user to see.
//
setModuleStatus(module, new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Error publishing module to server"));
setModulePublishState(module, IServer.PUBLISH_STATE_UNKNOWN);
setModuleState(module, IServer.STATE_UNKNOWN);
throw e;
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishModule");
}
@Override
public void publishStart(IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishStart", monitor);
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishStart");
}
@Override
public void publishFinish(IProgressMonitor monitor) throws CoreException {
doPublishFinish(monitor);
}
private void doPublishFinish(IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishFinish", monitor);
IModule[] modules = this.getServer().getModules();
boolean allpublished = true;
for (int i = 0; i < modules.length; i++) {
int state = getServer().getModulePublishState(new IModule[] { modules[i] });
if (state != IServer.PUBLISH_STATE_NONE) {
allpublished = false;
break;
}
}
if (allpublished) {
setServerPublishState(IServer.PUBLISH_STATE_NONE);
setServerStatus(null);
} else {
setServerPublishState(IServer.PUBLISH_STATE_UNKNOWN);
setServerStatus(new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Error publishing one or more modules to server"));
}
GeronimoConnectionFactory.getInstance().destroy(getServer());
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishFinish");
}
/**
* Initializes the Geronimo server delegate. This method is called by the server core framework
* to give delegates a chance to do their own initialization. As such, the GEP proper should
* never call this method.
*
* @param monitor a progress monitor, or <code>null</code> if progress reporting and cancellation
* are not desired
*/
@Override
protected void initialize(IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.initialize", monitor);
publishStateListener = new PublishStateListener();
getServer().addServerListener(publishStateListener, ServerEvent.MODULE_CHANGE | ServerEvent.PUBLISH_STATE_CHANGE);
defaultModuleHandler = new DefaultModuleHandler(this);
osgiModuleHandler = new OSGiModuleHandler(this);
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.initialize");
}
/*
* GERONIMODEVTOOLS-715: Update parent module publish state to "publish" if a child
* publish state was changed to "publish". This is because GEP right now is redeploying the
* entire application instead of the individual bundle/module that has changed. Once that is
* supported this listener can be removed.
*/
private class PublishStateListener implements IServerListener {
public void serverChanged(ServerEvent event) {
if (event.getPublishState() == IServer.PUBLISH_STATE_INCREMENTAL ||
event.getPublishState() == IServer.PUBLISH_STATE_FULL) {
// reset server status in case it was set
setServerStatus(null);
IModule[] modules = event.getModule();
if (modules.length > 1) {
if (getServer().getServerState() == IServer.STATE_STARTED) {
setModulePublishState(event.getModule(), IServer.PUBLISH_STATE_NONE);
setModuleStatus(event.getModule(), new Status(IStatus.OK, Activator.PLUGIN_ID, Messages.moduleModified));
} else {
setModulePublishState(event.getModule(), IServer.PUBLISH_STATE_UNKNOWN);
setModuleStatus(event.getModule(), null);
}
IModule[] newModules = new IModule[modules.length - 1];
System.arraycopy(modules, 0, newModules, 0, newModules.length);
// update parent module publish state to "publish"
setModulePublishState(newModules, event.getPublishState());
// reset parent module status message
setModuleStatus(newModules, null);
}
}
}
}
/*
* (non-Javadoc)
*
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#dispose()
*/
public void dispose() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.dispose");
stopUpdateServerStateTask();
stopSynchronizeProjectOnServerTask();
if (publishStateListener != null) {
getServer().removeServerListener(publishStateListener);
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.dispose");
}
public String getRuntimeClass() {
return "org.apache.geronimo.cli.daemon.DaemonCLI";
}
public void setServerStarted() {
setServerState(IServer.STATE_STARTED);
GeronimoConnectionFactory.getInstance().destroy(getServer());
startSynchronizeProjectOnServerTask();
}
public void setServerStopped() {
setServerState(IServer.STATE_STOPPED);
resetModuleState();
stopSynchronizeProjectOnServerTask();
if (defaultModuleHandler != null) {
defaultModuleHandler.serverStopped();
}
if (osgiModuleHandler != null) {
osgiModuleHandler.serverStopped();
}
GeronimoConnectionFactory.getInstance().destroy(getServer());
}
public IGeronimoServer getGeronimoServer() {
return (IGeronimoServer) getServer().loadAdapter(IGeronimoServer.class, null);
}
protected void stopImpl() {
if (processListener != null) {
DebugPlugin.getDefault().removeDebugEventListener(processListener);
processListener = null;
}
setServerState(IServer.STATE_STOPPED);
resetModuleState();
}
private void resetModuleState() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.resetModuleState");
IModule[] modules = getServer().getModules();
for (int i = 0; i < modules.length; i++) {
IModule[] module = new IModule[] { modules[i] };
setModuleState(module, IServer.STATE_STOPPED);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.resetModuleState");
}
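/*
 * OSGi bundle and fragment bundle modules are handled by the OSGi module handler; all other
 * modules use the default module handler.
 */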
protected AbstractModuleHandler getModuleHandler(IModule module) {
return (GeronimoUtils.isBundleModule(module) || GeronimoUtils.isFragmentBundleModule(module)) ? osgiModuleHandler : defaultModuleHandler;
}
protected void invokeCommand(int deltaKind, IModule module, IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.invokeCommand", deltaKindToString(deltaKind), module.getName());
AbstractModuleHandler moduleHandler = getModuleHandler(module);
ClassLoader old = Thread.currentThread().getContextClassLoader();
try {
ClassLoader cl = getContextClassLoader();
if (cl != null)
Thread.currentThread().setContextClassLoader(cl);
switch (deltaKind) {
case ADDED: {
moduleHandler.doAdded(module, monitor);
break;
}
case CHANGED: {
moduleHandler.doChanged(module, monitor);
break;
}
case REMOVED: {
moduleHandler.doRemoved(module, monitor);
break;
}
case NO_CHANGE: {
moduleHandler.doNoChange(module, monitor);
break;
}
default:
throw new IllegalArgumentException();
}
} catch (CoreException e) {
throw e;
} catch (Exception e) {
e.printStackTrace();
} finally {
Thread.currentThread().setContextClassLoader(old);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.invokeCommand");
}
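/*
 * Resolves the document base (expanded deployment directory) of a web module by looking up
 * its context path on the server; returns null for external modules or when no context path
 * or document base can be determined.
 */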
private String getWebModuleDocumentBase(IModule webModule) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.getWebModuleDocumentBase", webModule);
if (webModule.isExternal()) {
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getWebModuleDocumentBase", "External module");
return null;
}
String contextPath = getServerDelegate().getContextPath(webModule);
if (contextPath == null) {
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getWebModuleDocumentBase", "Context path is null");
return null;
}
String documentBase = getWebModuleDocumentBase(contextPath);
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getWebModuleDocumentBase", contextPath, documentBase);
return documentBase;
}
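/*
 * Attempts to publish a changed web module by copying the modified files directly into its
 * expanded deployment directory instead of redeploying it. Returns null if the replacement
 * cannot be performed, otherwise the status of the copy operation.
 */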
private IStatus tryFileReplace(IModule[] module) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.tryFileReplace", module.toString());
IModule webModule = module[module.length - 1];
String documentBase = getWebModuleDocumentBase(webModule);
if (documentBase == null ) {
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.tryFileReplace", "Document base is null");
return null;
}
List<IModuleResourceDelta> modifiedFiles = findModifiedFiles(module);
if (modifiedFiles == null) {
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.tryFileReplace", "Some modified files cannot be replaced");
return null;
}
Trace.trace(Trace.INFO, "Modified files: " + modifiedFiles, Activator.logCore);
IStatus status = findAndReplaceFiles(webModule, modifiedFiles, documentBase);
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.tryFileReplace", status);
return status;
}
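/*
 * Collects the modified files that match the configured no-redeploy file patterns. Returns
 * null if any change falls outside those patterns and therefore requires a full redeploy.
 */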
private List<IModuleResourceDelta> findModifiedFiles(IModule[] module) {
IModuleResourceDelta[] deltaArray = getPublishedResourceDelta(module);
GeronimoServerDelegate delegate = getServerDelegate();
List<String> includes = delegate.getNoRedeployFilePatternsAsList(true);
List<String> excludes = delegate.getNoRedeployFilePatternsAsList(false);
List<IModuleResourceDelta> modifiedFiles = new ArrayList<IModuleResourceDelta>();
for (IModuleResourceDelta delta : deltaArray) {
List<IModuleResourceDelta> files = DeploymentUtils.getAffectedFiles(delta, includes, excludes);
// if null then some other files were changed that we cannot just copy/replace.
if (files == null) {
return null;
} else {
modifiedFiles.addAll(files);
}
}
return modifiedFiles;
}
/*
* This method is used to replace updated files without redeploying the entire module.
*/
private IStatus findAndReplaceFiles(IModule module, List<IModuleResourceDelta> modifiedFiles, String documentBase) {
Trace.trace(Trace.INFO, "Replacing updated files for " + module.getName() + " module.", Activator.logCore);
String ch = File.separator;
byte[] buffer = new byte[10 * 1024];
int bytesRead;
for (IModuleResourceDelta deltaModule : modifiedFiles) {
IModuleFile moduleFile = (IModuleFile) deltaModule.getModuleResource();
StringBuilder target = new StringBuilder(documentBase);
target.append(ch);
String relativePath = moduleFile.getModuleRelativePath().toOSString();
if (relativePath != null && relativePath.length() != 0) {
target.append(relativePath);
target.append(ch);
}
target.append(moduleFile.getName());
File file = new File(target.toString());
if(! file.isAbsolute()) {
file = getServerResource(IGeronimoServerBehavior.VAR_CATALINA_DIR + target.toString()).toFile();
}
switch (deltaModule.getKind()) {
case IModuleResourceDelta.REMOVED:
if (file.exists()) {
file.delete();
}
break;
case IModuleResourceDelta.ADDED:
case IModuleResourceDelta.CHANGED:
File parentFile = file.getParentFile();
if (parentFile != null && !parentFile.exists()) {
if (!parentFile.mkdirs()) {
Trace.trace(Trace.ERROR, "Cannot create target directory: " + parentFile, Activator.logCore);
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Cannot create target directory", null);
}
}
String sourceFile = relativePath;
InputStream in = null;
FileOutputStream out = null;
try {
IFile srcIFile = (IFile) moduleFile.getAdapter(IFile.class);
if (srcIFile != null) {
in = srcIFile.getContents();
} else {
File srcFile = (File) moduleFile.getAdapter(File.class);
in = new FileInputStream(srcFile);
}
out = new FileOutputStream(file);
while ((bytesRead = in.read(buffer)) > 0) {
out.write(buffer, 0, bytesRead);
}
} catch (FileNotFoundException e) {
Trace.trace(Trace.ERROR, "Cannot find file to copy: " + sourceFile, e, Activator.logCore);
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Cannot find file " + sourceFile, e);
} catch (IOException e) {
Trace.trace(Trace.ERROR, "Cannot copy file: " + sourceFile, e, Activator.logCore);
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Cannot copy file " + sourceFile, e);
} catch (CoreException e) {
Trace.trace(Trace.ERROR, "Cannot copy file: " + sourceFile, e, Activator.logCore);
return e.getStatus();
} finally {
if (in != null) {
try { in.close(); } catch (IOException ignore) {}
}
if (out != null) {
try { out.close(); } catch (IOException ignore) {}
}
}
break;
}
}
return Status.OK_STATUS;
}
public Map getServerInstanceProperties() {
return getRuntimeDelegate().getServerInstanceProperties();
}
protected GeronimoRuntimeDelegate getRuntimeDelegate() {
GeronimoRuntimeDelegate rd = (GeronimoRuntimeDelegate) getServer().getRuntime().getAdapter(GeronimoRuntimeDelegate.class);
if (rd == null)
rd = (GeronimoRuntimeDelegate) getServer().getRuntime().loadAdapter(GeronimoRuntimeDelegate.class, new NullProgressMonitor());
return rd;
}
protected GeronimoServerDelegate getServerDelegate() {
GeronimoServerDelegate sd = (GeronimoServerDelegate) getServer().getAdapter(GeronimoServerDelegate.class);
if (sd == null)
sd = (GeronimoServerDelegate) getServer().loadAdapter(GeronimoServerDelegate.class, new NullProgressMonitor());
return sd;
}
protected boolean isRemote() {
return getServer().getServerType().supportsRemoteHosts()
&& !SocketUtil.isLocalhost(getServer().getHost());
}
protected void setupLaunchClasspath(ILaunchConfigurationWorkingCopy wc, IVMInstall vmInstall) throws CoreException {
List<IRuntimeClasspathEntry> cp = new ArrayList<IRuntimeClasspathEntry>();
String version = getServer().getRuntime().getRuntimeType().getVersion();
if (version.startsWith("3")) {
// get the required jar files from the server lib directory
IPath libPath = getServer().getRuntime().getLocation().append("/lib");
for (String jarFile: libPath.toFile().list()){
IPath serverJar = libPath.append("/"+jarFile);
cp.add(JavaRuntime.newArchiveRuntimeClasspathEntry(serverJar));
}
}else{
//for 1.1,2.0,2.1,2.2
IPath serverJar = getServer().getRuntime().getLocation().append("/bin/server.jar");
cp.add(JavaRuntime.newArchiveRuntimeClasspathEntry(serverJar));
}
// merge existing classpath with server classpath
IRuntimeClasspathEntry[] existingCps = JavaRuntime.computeUnresolvedRuntimeClasspath(wc);
for (int i = 0; i < existingCps.length; i++) {
Trace.trace(Trace.INFO, "cpentry: " + cp , Activator.traceCore);
if (cp.contains(existingCps[i]) == false) {
cp.add(existingCps[i]);
}
}
//
// Add classpath entries from any selected classpath containers
//
if ( getGeronimoServer().isSelectClasspathContainers()) {
List<String> containers = getGeronimoServer().getClasspathContainers();
for ( String containerPath : containers ) {
List<IClasspathEntry> cpes = ClasspathContainersHelper.queryWorkspace( containerPath );
for ( IClasspathEntry cpe : cpes ) {
RuntimeClasspathEntry rcpe = new RuntimeClasspathEntry( cpe );
Trace.trace(Trace.INFO, "Classpath Container Entry: " + rcpe , Activator.traceCore);
if (cp.contains(rcpe) == false) {
cp.add( rcpe );
}
}
}
}
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH, convertCPEntryToMemento(cp));
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);
}
private List<String> convertCPEntryToMemento(List<IRuntimeClasspathEntry> cpEntryList) {
List<String> list = new ArrayList<String>(cpEntryList.size());
Iterator<IRuntimeClasspathEntry> iterator = cpEntryList.iterator();
while (iterator.hasNext()) {
IRuntimeClasspathEntry entry = iterator.next();
try {
list.add(entry.getMemento());
} catch (CoreException e) {
Trace.trace(Trace.ERROR, "Could not resolve classpath entry: "
+ entry, e, Activator.logCore);
}
}
return list;
}
public void setProcess(final IProcess newProcess) {
if (newProcess == null) {
return;
}
if (processListener != null) {
DebugPlugin.getDefault().removeDebugEventListener(processListener);
}
processListener = new IDebugEventSetListener() {
public void handleDebugEvents(DebugEvent[] events) {
if (events != null) {
int size = events.length;
for (int i = 0; i < size; i++) {
if (newProcess.equals(events[i].getSource()) && events[i].getKind() == DebugEvent.TERMINATE) {
stopImpl();
}
}
}
}
};
DebugPlugin.getDefault().addDebugEventListener(processListener);
}
protected void startPingThread() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.startPingThread");
pingThread = new PingThread(this, getServer());
pingThread.start();
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.startPingThread");
}
protected void stopPingThread() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopPingThread");
if (pingThread != null) {
pingThread.interrupt();
pingThread = null;
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopPingThread");
}
protected Kernel getKernel() throws SecurityException {
if (kernel == null) {
try {
MBeanServerConnection connection = getServerConnection();
if (connection != null)
kernel = new KernelDelegate(connection);
} catch (SecurityException e) {
throw e;
} catch (Exception e) {
Trace.trace(Trace.INFO, "Kernel connection failed. "
+ e.getMessage(), Activator.traceCore);
}
}
return kernel;
}
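/*
 * Requests server shutdown by stopping the OSGi framework bundle (bundle id 0) through the
 * framework MBean.
 */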
private void stopKernel() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopKernel");
try {
MBeanServerConnection connection = getServerConnection();
connection.invoke(getFrameworkMBean(connection), "stopBundle",
new Object[] { 0L }, new String[] { long.class.getName() });
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error while requesting server shutdown", e, Activator.traceCore);
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopKernel");
}
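/*
 * Returns true if the kernel can be reached and reports that it is running. Invalid
 * credentials stop the ping thread and force the server to stop; any other failure clears
 * the cached kernel so that the connection is re-established on the next call.
 */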
public boolean isKernelAlive() {
try {
return getKernel() != null && kernel.isRunning();
} catch (SecurityException e) {
Trace.trace(Trace.ERROR, "Invalid username and/or password.", e, Activator.logCore);
pingThread.interrupt();
if (getServer().getServerState() != IServer.STATE_STOPPED) {
forceStopJob(true,e);
}
} catch (Exception e) {
Trace.trace(Trace.WARNING, "Geronimo Server may have been terminated manually outside of workspace.", e, Activator.logCore);
kernel = null;
}
return false;
}
private void forceStopJob(boolean b, final SecurityException e) {
/*
*
 * Currently, another Status is returned by StartJob in Server.
 * Its message doesn't contain the reason for the exception,
 * so this job is created to show a message (invalid username and/or password) to the user.
*
* TODO: Need a method to remove the error message thrown by StartJob in Server.
*
*/
String jobName = NLS.bind(org.eclipse.wst.server.core.internal.Messages.errorStartFailed, getServer().getName());
// This message has different variable names in WTP 3.0 and 3.1, so we define it here instead of using the one in WTP
final String jobStartingName = NLS.bind("Starting {0}", getServer().getName());
new Job(jobName) {
@Override
protected IStatus run(IProgressMonitor monitor) {
MultiStatus multiStatus = new MultiStatus(Activator.PLUGIN_ID, 0, jobStartingName, null);
multiStatus.add(new Status(IStatus.ERROR,Activator.PLUGIN_ID,0,"Invalid username and/or password.",e));
try {
stop(true);
} catch (Exception e){
multiStatus.add(new Status(IStatus.ERROR,Activator.PLUGIN_ID,0,"Failed to stop server",e));
}
return multiStatus;
}
}.schedule();
}
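/*
 * Checks the "kernelFullyStarted" attribute of the PersistentConfigurationList GBean to
 * determine whether the server has finished starting all of its configurations.
 */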
public boolean isFullyStarted() {
if (isKernelAlive()) {
AbstractNameQuery query = new AbstractNameQuery(PersistentConfigurationList.class.getName());
Set<AbstractName> configLists = kernel.listGBeans(query);
if (!configLists.isEmpty()) {
AbstractName on = (AbstractName) configLists.toArray()[0];
try {
Boolean b = (Boolean) kernel.getAttribute(on, "kernelFullyStarted");
return b.booleanValue();
} catch (GBeanNotFoundException e) {
// ignore
} catch (NoSuchAttributeException e) {
// ignore
} catch (Exception e) {
e.printStackTrace();
}
} else {
Trace.trace(Trace.INFO, "configLists is empty", Activator.traceCore);
}
}
return false;
}
@Override
public IPath getServerResource(String path) {
IPath serverRoot = getServer().getRuntime().getLocation();
return serverRoot.append(path);
}
public void startUpdateServerStateTask() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.startUpdateServerStateTask", getServer().getName());
stateTimer = new Timer(true);
stateTimer.schedule(new UpdateServerStateTask(this, getServer()), 0, TIMER_TASK_INTERVAL * 1000);
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.startUpdateServerStateTask");
}
public void startSynchronizeProjectOnServerTask() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.startSynchronizeProjectOnServerTask", getServer().getName());
if (synchronizerTask != null) {
synchronizerTask.stop();
}
synchronizerTask = new SynchronizeProjectOnServerTask(this, getServer());
synchronizerTask.start();
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.startSynchronizeProjectOnServerTask");
}
public void stopUpdateServerStateTask() {
Trace.tracePoint("Entry", "GeronimoServerBehaviourDelegate.stopUpdateServerStateTask", Activator.traceCore);
if (stateTimer != null) {
stateTimer.cancel();
stateTimer = null;
}
Trace.tracePoint("Exit ", "GeronimoServerBehaviourDelegate.stopUpdateServerStateTask", Activator.traceCore);
}
public void stopSynchronizeProjectOnServerTask() {
Trace.tracePoint("Entry", "GeronimoServerBehaviourDelegate.stopSynchronizeProjectOnServerTask", Activator.traceCore);
if (synchronizerTask != null) {
synchronizerTask.stop();
synchronizerTask = null;
}
Trace.tracePoint("Exit ", "GeronimoServerBehaviourDelegate.stopSynchronizeProjectOnServerTask", Activator.traceCore);
}
protected IPath getModulePath(IModule[] module, URL baseURL) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.getModulePath", Arrays.asList(module), baseURL);
IPath modulePath = new Path(baseURL.getFile());
if (module.length == 2) {
IModule workingModule = module[module.length - 1];
modulePath = modulePath.append(workingModule.getName());
if (GeronimoUtils.isWebModule(workingModule)) {
modulePath = modulePath.addFileExtension("war");
} else if (GeronimoUtils.isEjbJarModule(workingModule)) {
modulePath = modulePath.addFileExtension("jar");
} else if (GeronimoUtils.isRARModule(workingModule)) {
modulePath = modulePath.addFileExtension("rar");
} else if (GeronimoUtils.isEarModule(workingModule)) {
modulePath = modulePath.addFileExtension("ear");
} else if (GeronimoUtils.isAppClientModule(workingModule)) {
modulePath = modulePath.addFileExtension("jar");
}
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.getModulePath", modulePath);
return modulePath;
}
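/*
 * Returns the kernel associated with the JMX deployment manager for this server, or null if
 * the deployment manager cannot be created.
 */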
private Kernel getDeploymentManagerKernel() {
GeronimoConnectionFactory connectionFactory = GeronimoConnectionFactory.getInstance();
try {
JMXDeploymentManager manager =
(JMXDeploymentManager) connectionFactory.getDeploymentManager(getServer());
return manager.getKernel();
} catch (DeploymentManagerCreationException e) {
Trace.trace(Trace.WARNING, "Error getting kernel from deployment manager", e, Activator.logCore);
return null;
}
}
public MBeanServerConnection getServerConnection() throws Exception {
String host = getServer().getHost();
String user = getGeronimoServer().getAdminID();
String password = getGeronimoServer().getAdminPassword();
String port = getGeronimoServer().getRMINamingPort();
JMXConnectorInfo connectorInfo = new JMXConnectorInfo(user, password, host, port);
// Use the classloader that loaded the current class as the default classloader when creating the JMXConnector
JMXConnector jmxConnector = GeronimoJMXConnectorFactory.create(connectorInfo, this.getClass().getClassLoader());
return jmxConnector.getMBeanServerConnection();
}
public ObjectName getMBean(MBeanServerConnection connection, String mbeanName, String name) throws Exception {
Set<ObjectName> objectNameSet =
connection.queryNames(new ObjectName(mbeanName), null);
if (objectNameSet.isEmpty()) {
throw new Exception(Messages.bind(Messages.mBeanNotFound, name));
} else if (objectNameSet.size() == 1) {
return objectNameSet.iterator().next();
} else {
throw new Exception(Messages.bind(Messages.multipleMBeansFound, name));
}
}
private ObjectName getFrameworkMBean(MBeanServerConnection connection) throws Exception {
return getMBean(connection, "osgi.core:type=framework,*", "Framework");
}
public Target[] getTargets() {
return null;
}
public static String deltaKindToString(int kind) {
switch(kind) {
case NO_CHANGE:
return "NO_CHANGE";
case ADDED:
return "ADDED";
case CHANGED:
return "CHANGED";
case REMOVED:
return "REMOVED";
}
return Integer.toString(kind);
}
public static String publishKindToString(int kind) {
switch(kind) {
case IServer.PUBLISH_AUTO:
return "Auto";
case IServer.PUBLISH_CLEAN:
return "Clean";
case IServer.PUBLISH_FULL:
return "Full";
case IServer.PUBLISH_INCREMENTAL:
return "Incremental";
}
return Integer.toString(kind);
}
public static void traceModuleResourceDelta(IModuleResourceDelta[] deltaArray, String tab) {
if (deltaArray != null) {
for (IModuleResourceDelta delta : deltaArray) {
int kind = delta.getKind();
IModuleResource resource = delta.getModuleResource();
Trace.trace(Trace.INFO, tab + resource.getName() + " " + deltaKindToString(kind), Activator.traceCore);
if (resource instanceof IModuleFile) {
// ignore
} else if (resource instanceof IModuleFolder) {
IModuleResourceDelta[] childDeltaArray = delta.getAffectedChildren();
traceModuleResourceDelta(childDeltaArray, tab + " ");
}
}
}
}
public String getConfigId(IModule module) throws Exception {
return getGeronimoServer().getVersionHandler().getConfigID(module);
}
private IStatus updateSharedLib(IModule[] module, IProgressMonitor monitor) {
IDataModel model = DataModelFactory.createDataModel(new SharedLibEntryDataModelProvider());
model.setProperty(ISharedLibEntryCreationDataModelProperties.MODULES, module);
model.setProperty(ISharedLibEntryCreationDataModelProperties.SERVER, getServer());
IDataModelOperation op = new SharedLibEntryCreationOperation(model);
try {
op.execute(monitor, null);
} catch (ExecutionException e) {
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, 0, e.getMessage(), e.getCause());
}
return Status.OK_STATUS;
}
@Override
public void startModule(IModule[] module, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.startModule", Arrays.asList(module));
try {
getModuleHandler(module[0]).doStartModule(module, monitor);
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error starting module " + module[0].getName(), e, Activator.logCore);
throw new RuntimeException("Error starting module " + module[0].getName(), e);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.startModule");
}
@Override
public void stopModule(IModule[] module, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopModule", Arrays.asList(module));
try {
getModuleHandler(module[0]).doStopModule(module, monitor);
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error stopping module " + module[0].getName(), e, Activator.logCore);
throw new RuntimeException("Error stopping module " + module[0].getName(), e);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopModule");
}
@Override
public void restartModule(IModule[] module, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.restartModule", Arrays.asList(module));
try {
getModuleHandler(module[0]).doRestartModule(module, monitor);
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error restarting module " + module[0].getName(), e, Activator.logCore);
throw new RuntimeException("Error restarting module " + module[0].getName(), e);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.restartModule");
}
@Override
public boolean canControlModule(IModule[] module) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.canControlModule", Arrays.asList(module));
// Enable start/stop for top-level modules only
if (module.length == 1) {
if (GeronimoUtils.isFragmentBundleModule(module[0])) {
// fragment bundles cannot be started/stopped
return false;
} else {
return true;
}
} else {
return false;
}
}
public void setModulesState(IModule[] module, int state) {
setModuleState(module, state);
}
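/*
* Returns the directory the module is published to: the workspace project location when running from the
* workspace, otherwise the location derived from the configuration's "baseURL" attribute in the kernel.
*/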
public IPath getPublishDirectory(IModule[] module) {
if (module == null || module.length == 0)
return null;
if (getGeronimoServer().isRunFromWorkspace()) {
// TODO fix me, see if project root, component root, or output
// container should be returned
return module[module.length - 1].getProject().getLocation();
} else {
ClassLoader old = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(getContextClassLoader());
String configId = getConfigId(module[0]);
Artifact artifact = Artifact.create(configId);
AbstractName name = Configuration.getConfigurationAbstractName(artifact);
GBeanData data = kernel.getGBeanData(name);
URL url = (URL) data.getAttribute("baseURL");
return getModulePath(module, url);
} catch (InvalidConfigException e) {
e.printStackTrace();
} catch (GBeanNotFoundException e) {
e.printStackTrace();
} catch (InternalKernelException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
} finally {
Thread.currentThread().setContextClassLoader(old);
}
}
return null;
}
// TODO: this can be cached
public String getWebModuleDocumentBase(String contextPath) {
Kernel kernel = getDeploymentManagerKernel();
if (kernel == null) {
Trace.trace(Trace.WARNING, "Error getting web module document base - no kernel", null, Activator.logCore);
return null;
}
Map<String, String> map = Collections.singletonMap("j2eeType", "WebModule");
if (!contextPath.startsWith("/")) {
contextPath = "/" + contextPath;
}
AbstractNameQuery query = new AbstractNameQuery(null, map, Collections.EMPTY_SET);
Set<AbstractName> webModuleNames = kernel.listGBeans(query);
for (AbstractName name : webModuleNames) {
try {
String moduleContextPath = (String) kernel.getAttribute(name, "contextPath");
if (contextPath.equals(moduleContextPath)) {
String docBase = (String) kernel.getAttribute(name, "docBase");
return (docBase != null && docBase.length() > 0) ? docBase : null;
}
} catch (GBeanNotFoundException e) {
// ignore
} catch (NoSuchAttributeException e) {
// ignore
} catch (Exception e) {
Trace.trace(Trace.WARNING, "Error getting web module document base", e, Activator.logCore);
}
}
return null;
}
public OSGiModuleHandler getOsgiModuleHandler() {
return osgiModuleHandler;
}
public boolean isPublished(IModule[] module) {
return super.hasBeenPublished(module);
}
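/*
* Returns true if the published resource delta of the root module, or of any of its child modules, is non-empty.
*/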
public boolean hasChanged(IModule rootModule) {
IModule[] module = new IModule [] { rootModule };
IModuleResourceDelta[] deltaArray = getPublishedResourceDelta(module);
if (deltaArray != null && deltaArray.length > 0) {
return true;
}
IModule[] childModules = getServerDelegate().getChildModules(module);
if (childModules != null) {
for (IModule childModule : childModules) {
deltaArray = getPublishedResourceDelta(new IModule[] {rootModule, childModule});
if (deltaArray != null && deltaArray.length > 0) {
return true;
}
}
}
return false;
}
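/*
* Collects the config ids of all published server modules whose contents have changed since the last publish.
*/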
public Set<String> getModifiedConfigIds() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.getModifiedConfigIds");
IServer server = getServer();
Set<String> configIds = new HashSet<String>();
IModule[] modules = server.getModules();
if (modules != null) {
for (IModule module : modules) {
IModule[] rootModule = new IModule[] { module };
// only consider modules that have been published and have changed
if (isPublished(rootModule) && hasChanged(module)) {
try {
String configId = DeploymentUtils.getConfigId(server, module);
configIds.add(configId);
} catch (CoreException e) {
// ignore
}
}
}
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getModifiedConfigIds", configIds);
return configIds;
}
}
| plugins/org.apache.geronimo.st.v30.core/src/main/java/org/apache/geronimo/st/v30/core/GeronimoServerBehaviourDelegate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.st.v30.core;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import javax.enterprise.deploy.spi.Target;
import javax.enterprise.deploy.spi.exceptions.DeploymentManagerCreationException;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;
import javax.naming.directory.NoSuchAttributeException;
import org.apache.geronimo.deployment.plugin.jmx.ExtendedDeploymentManager;
import org.apache.geronimo.deployment.plugin.jmx.JMXDeploymentManager;
import org.apache.geronimo.gbean.AbstractName;
import org.apache.geronimo.gbean.AbstractNameQuery;
import org.apache.geronimo.gbean.GBeanData;
import org.apache.geronimo.kernel.GBeanNotFoundException;
import org.apache.geronimo.kernel.InternalKernelException;
import org.apache.geronimo.kernel.Kernel;
import org.apache.geronimo.kernel.config.Configuration;
import org.apache.geronimo.kernel.config.InvalidConfigException;
import org.apache.geronimo.kernel.config.PersistentConfigurationList;
import org.apache.geronimo.kernel.repository.Artifact;
import org.apache.geronimo.st.core.GeronimoJMXConnectorFactory;
import org.apache.geronimo.st.core.GeronimoJMXConnectorFactory.JMXConnectorInfo;
import org.apache.geronimo.st.v30.core.UpdateServerStateTask;
import org.apache.geronimo.st.v30.core.commands.DeploymentCommandFactory;
import org.apache.geronimo.st.v30.core.internal.DependencyHelper;
import org.apache.geronimo.st.v30.core.internal.Messages;
import org.apache.geronimo.st.v30.core.internal.Trace;
import org.apache.geronimo.st.v30.core.operations.ISharedLibEntryCreationDataModelProperties;
import org.apache.geronimo.st.v30.core.operations.SharedLibEntryCreationOperation;
import org.apache.geronimo.st.v30.core.operations.SharedLibEntryDataModelProvider;
import org.apache.geronimo.st.v30.core.osgi.AriesHelper;
import org.apache.geronimo.st.v30.core.osgi.OSGiModuleHandler;
import org.apache.geronimo.system.jmx.KernelDelegate;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.MultiStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.debug.core.DebugEvent;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.IDebugEventSetListener;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.debug.core.ILaunchManager;
import org.eclipse.debug.core.model.IProcess;
import org.eclipse.debug.core.sourcelookup.AbstractSourceLookupDirector;
import org.eclipse.debug.core.sourcelookup.ISourceContainer;
import org.eclipse.debug.core.sourcelookup.containers.DefaultSourceContainer;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.internal.launching.RuntimeClasspathEntry;
import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
import org.eclipse.jdt.launching.IVMInstall;
import org.eclipse.jdt.launching.JavaRuntime;
import org.eclipse.osgi.util.NLS;
import org.eclipse.wst.common.frameworks.datamodel.DataModelFactory;
import org.eclipse.wst.common.frameworks.datamodel.IDataModel;
import org.eclipse.wst.common.frameworks.datamodel.IDataModelOperation;
import org.eclipse.wst.server.core.IModule;
import org.eclipse.wst.server.core.IServer;
import org.eclipse.wst.server.core.IServerListener;
import org.eclipse.wst.server.core.ServerEvent;
import org.eclipse.wst.server.core.ServerPort;
import org.eclipse.wst.server.core.internal.IModulePublishHelper;
import org.eclipse.wst.server.core.internal.ProgressUtil;
import org.eclipse.wst.server.core.model.IModuleFile;
import org.eclipse.wst.server.core.model.IModuleResource;
import org.eclipse.wst.server.core.model.IModuleResourceDelta;
import org.eclipse.wst.server.core.model.ServerBehaviourDelegate;
import org.eclipse.wst.server.core.util.PublishHelper;
import org.eclipse.wst.server.core.util.SocketUtil;
/**
* @version $Rev$ $Date$
*/
public class GeronimoServerBehaviourDelegate extends ServerBehaviourDelegate implements IGeronimoServerBehavior, IModulePublishHelper {
public static final int TIMER_TASK_INTERVAL = 20;
public static final int TIMER_TASK_DELAY = 20;
private Kernel kernel = null;
protected Timer stateTimer = null;
protected SynchronizeProjectOnServerTask synchronizerTask = null;
protected PingThread pingThread;
protected transient IDebugEventSetListener processListener;
public static final String ERROR_SETUP_LAUNCH_CONFIGURATION = "errorInSetupLaunchConfiguration";
private PublishStateListener publishStateListener;
private Set<IProject> knownSourceProjects = null;
private DefaultModuleHandler defaultModuleHandler;
private OSGiModuleHandler osgiModuleHandler;
protected ClassLoader getContextClassLoader() {
return Kernel.class.getClassLoader();
}
/*
* (non-Javadoc)
*
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#setupLaunchConfiguration(org.eclipse.debug.core.ILaunchConfigurationWorkingCopy,
* org.eclipse.core.runtime.IProgressMonitor)
*/
public void setupLaunchConfiguration(ILaunchConfigurationWorkingCopy wc, IProgressMonitor monitor) throws CoreException {
if (isRemote())// No launch for remote servers.
return;
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_MAIN_TYPE_NAME, getRuntimeClass());
GeronimoRuntimeDelegate runtime = getRuntimeDelegate();
IVMInstall vmInstall = runtime.getVMInstall();
if (vmInstall != null) {
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_JRE_CONTAINER_PATH,
JavaRuntime.newJREContainerPath(vmInstall).toPortableString());
}
String existingProgArgs = null;
wc.setAttribute(ERROR_SETUP_LAUNCH_CONFIGURATION, (String)null);
try{
setupLaunchClasspath(wc, vmInstall);
existingProgArgs = wc.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, (String) null);
}catch (CoreException e){
// Throwing a CoreException at this time will not accomplish anything useful as WTP
// will essentially ignore it. Instead set a flag in the configuration that can
// subsequently be checked when an attempt is made to launch the server in
// GeronimoLaunchConfigurationDelegate.launch(). At that point a CoreException will be
// thrown that WTP will handle properly and will display an error dialog which is
// exactly what we want the GEP user to see.
wc.setAttribute(ERROR_SETUP_LAUNCH_CONFIGURATION, e.getMessage());
}
GeronimoServerDelegate gsd = getServerDelegate();
String programArgs = gsd.getProgramArgs();
Trace.tracePoint("GeronimoServerBehaviourDelegate.v30", Activator.traceCore, "setupLaunchConfiguration serverProgramArgs", programArgs);
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, programArgs);
/*
programArgs = getServerProgramArgs(existingProgArgs, getServerDelegate());
Trace.tracePoint("GeronimoServerBehaviourDelegate.v30", "setupLaunchConfiguration serverProgramArgs",
programArgs);
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, programArgs);
*/
String vmArgs = gsd.getVMArgs();
Trace.tracePoint("GeronimoServerBehaviourDelegate.v30", Activator.traceCore, "setupLaunchConfiguration serverVMArgs", vmArgs);
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_VM_ARGUMENTS, vmArgs);
}
/**
* @param launch
* @param launchMode
* @param monitor
* @throws CoreException
*/
synchronized protected void setupLaunch(ILaunch launch, String launchMode, IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.setupLaunch", launch, launchMode, monitor);
if (!SocketUtil.isLocalhost(getServer().getHost()))
return;
ServerPort[] ports = getServer().getServerPorts(null);
for (int i = 0; i < ports.length; i++) {
ServerPort sp = ports[i];
if (SocketUtil.isPortInUse(ports[i].getPort(), 5))
throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, 0, Messages.bind(Messages.errorPortInUse, Integer.toString(sp.getPort()), sp.getName()), null));
}
stopUpdateServerStateTask();
setServerState(IServer.STATE_STARTING);
setMode(launchMode);
IServerListener listener = new IServerListener() {
public void serverChanged(ServerEvent event) {
int eventKind = event.getKind();
if ((eventKind & ServerEvent.STATE_CHANGE) != 0) {
int state = event.getServer().getServerState();
if (state == IServer.STATE_STARTED
|| state == IServer.STATE_STOPPED) {
GeronimoServerBehaviourDelegate.this.getServer().removeServerListener(this);
startUpdateServerStateTask();
}
}
}
};
getServer().addServerListener(listener);
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.setupLaunch");
}
/*
* (non-Javadoc)
*
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#stop(boolean)
*/
public synchronized void stop(final boolean force) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.stop", force);
IServer server = getServer();
if (server.getServerState() != IServer.STATE_STOPPED) {
setServerState(IServer.STATE_STOPPING);
}
// stop threads
stopPingThread();
stopSynchronizeProjectOnServerTask();
// request shutdown
stopKernel();
// wait for shutdown
if (!waitForStopped(60 * 1000) || force) {
ILaunch launch = server.getLaunch();
if (launch != null) {
Trace.trace(Trace.INFO, "Killing the geronimo server process", Activator.traceCore); //$NON-NLS-1$
try {
launch.terminate();
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error killing the geronimo server process", e, Activator.logCore); //$NON-NLS-1$
}
}
}
GeronimoConnectionFactory.getInstance().destroy(server);
stopImpl();
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.stop");
}
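/*
* Polls the kernel until it is no longer alive or the timeout (in milliseconds) expires; returns true if the server stopped.
*/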
private boolean waitForStopped(long timeout) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.waitForStopped", timeout);
long started = System.currentTimeMillis();
boolean stopped = false;
try {
while (System.currentTimeMillis() - started < timeout) {
if (isKernelAlive()) {
Thread.sleep(500);
} else {
stopped = true;
break;
}
}
} catch (InterruptedException e) {
// ignore
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.waitForStopped", stopped);
return stopped;
}
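/*
* Records the publish result for a module: clears its state on success, otherwise adds the status to the
* MultiStatus and marks the module publish state as unknown.
*/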
private void setStatus(IModule[] module, IStatus status, MultiStatus multiStatus) {
if (status.isOK()) {
setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
setModuleStatus(module, null);
} else {
multiStatus.add(status);
setModuleStatus(module, status);
setModulePublishState(module, IServer.PUBLISH_STATE_UNKNOWN);
}
}
void setKnownSourceProjects(Set<IProject> knownSourceProjects) {
this.knownSourceProjects = knownSourceProjects;
}
boolean hasKnownSourceProject(List<IModule[]> moduleList) {
if (knownSourceProjects != null) {
for (IModule[] modules : moduleList) {
for (IModule module : modules) {
IProject project = module.getProject();
if (project != null && !knownSourceProjects.contains(project)) {
Trace.trace(Trace.INFO, "Project " + project.getName() + " is not source lookup list.", Activator.traceCore); //$NON-NLS-1$
return false;
}
}
}
}
return true;
}
void resetSourceLookupList() {
Trace.trace(Trace.INFO, "Resetting source lookup list.", Activator.traceCore); //$NON-NLS-1$
// reset DefaultSourceContainer - that will force Eclipse to re-compute the source paths
AbstractSourceLookupDirector locator = (AbstractSourceLookupDirector) getServer().getLaunch().getSourceLocator();
ISourceContainer[] oldContainers = locator.getSourceContainers();
ISourceContainer[] newContainers = new ISourceContainer[oldContainers.length];
System.arraycopy(oldContainers, 0, newContainers, 0, oldContainers.length);
DefaultSourceContainer newDefaultContainer = new DefaultSourceContainer();
for (int i = 0; i < newContainers.length; i++) {
if (newDefaultContainer.getType().equals(newContainers[i].getType())) {
newContainers[i] = newDefaultContainer;
break;
}
}
locator.setSourceContainers(newContainers);
// reset knownSourceProjects as they will be set once Eclipse re-computes the source paths
knownSourceProjects = null;
}
/*
* Override this method to be able to process in-place shared lib entries and restart the shared lib configuration for all projects prior
* to publishing each IModule.
*
* This overridden method also fixes WTP Bugzilla 123676 to prevent duplicate redeploys if both parent and child modules have deltas.
*
* (non-Javadoc)
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#publishModules(int, java.util.List, java.util.List, org.eclipse.core.runtime.MultiStatus, org.eclipse.core.runtime.IProgressMonitor)
*/
protected void publishModules(int kind, List modules, List deltaKind, MultiStatus multi, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishModules", publishKindToString(kind), Arrays.asList(modules), Arrays.asList(deltaKind), multi, monitor);
//
// WTP publishes modules in reverse alphabetical order which does not account for possible
// dependencies between modules. If necessary reorder the publish order of the modules
// based on any discovered dependencies.
//
if (modules != null && modules.size() > 0) {
List list = getOrderedModules(this.getServer(),modules, deltaKind);
modules = (List) list.get(0);
deltaKind = (List) list.get(1);
// trace output
for (int i = 0; i < modules.size(); i++) {
IModule[] module = (IModule[]) modules.get(i);
Trace.trace(Trace.INFO, i + " " + Arrays.asList(module).toString() + " "
+ deltaKindToString(((Integer) deltaKind.get(i)).intValue()), Activator.traceCore);
}
}
IStatus status = Status.OK_STATUS;
if (modules != null && modules.size() > 0 && getGeronimoServer().isInPlaceSharedLib()) {
List<IModule> rootModules = new ArrayList<IModule>();
for(int i = 0; i < modules.size(); i++) {
IModule[] module = (IModule[]) modules.get(i);
if(!rootModules.contains(module[0])) {
rootModules.add(module[0]);
}
}
IModule[] toProcess = (IModule[])rootModules.toArray(new IModule[rootModules.size()]);
status = updateSharedLib(toProcess, ProgressUtil.getSubMonitorFor(monitor, 1000));
}
/*
* Build a map of root modules that need to be published. This avoids duplicate redeploys
* and guarantees that publishModule() is called once per deployed application.
*/
Map<String, ModuleDeltaList> publishMap = new LinkedHashMap<String, ModuleDeltaList>();
for (int i = 0; i < modules.size(); i++) {
IModule[] module = (IModule[]) modules.get(i);
Integer moduleDeltaKind = (Integer) deltaKind.get(i);
IModule rootModule = module[0];
ModuleDeltaList list = publishMap.get(rootModule.getId());
if (list == null) {
list = new ModuleDeltaList(rootModule);
publishMap.put(rootModule.getId(), list);
}
if (module.length == 1) {
list.setRootModuleDelta(moduleDeltaKind.intValue());
} else {
list.addChildModule(module, moduleDeltaKind.intValue());
}
}
// Reset source code lookup list - see GERONIMODEVTOOLS-763 for details.
if (ILaunchManager.DEBUG_MODE.equals(getServer().getMode()) && !hasKnownSourceProject(modules)) {
resetSourceLookupList();
}
if(status.isOK()) {
if (modules == null)
return;
int size = modules.size();
if (size == 0)
return;
if (monitor.isCanceled())
return;
// phase 1: see if the modified contents can be copied/replaced
if (getServerDelegate().isNoRedeploy() && !isRemote()) {
Iterator<ModuleDeltaList> iterator = publishMap.values().iterator();
while (iterator.hasNext()) {
ModuleDeltaList moduleList = iterator.next();
IModule[] rootModule = moduleList.getRootModule();
if (GeronimoUtils.isEBAModule(rootModule[0]) || GeronimoUtils.isEarModule(rootModule[0])) {
if (moduleList.hasChangedChildModulesOnly(true)) {
boolean replacementPossible = true;
Map<IModule[], IStatus> statusMap = new HashMap<IModule[], IStatus>();
for (ModuleDelta moduleDelta : moduleList.getChildModules()) {
IModule bundleModule = moduleDelta.module[1];
if (moduleDelta.delta == CHANGED && (GeronimoUtils.isWebModule(bundleModule) || GeronimoUtils.isBundleModule(bundleModule))) {
// try to do replacement
status = tryFileReplace(moduleDelta.module);
if (status == null) {
// replacement was not possible
replacementPossible = false;
break;
} else {
statusMap.put(moduleDelta.module, status);
}
} else {
statusMap.put(moduleDelta.module, Status.OK_STATUS);
}
}
if (replacementPossible) {
// replacement was possible for all changed child modules - remove it from publish list
iterator.remove();
statusMap.put(rootModule, Status.OK_STATUS);
for (Map.Entry<IModule[], IStatus> entry : statusMap.entrySet()) {
setStatus(entry.getKey(), entry.getValue(), multi);
}
} else {
// replacement was not possible for at least one child module - redeploy the module
}
}
} else if (GeronimoUtils.isWebModule(rootModule[0]) || GeronimoUtils.isBundleModule(rootModule[0])) {
if (moduleList.getEffectiveRootDelta() == CHANGED) {
// contents changed - try to do replacement
status = tryFileReplace(rootModule);
if (status != null) {
// replacement was possible - remove it from publish list
iterator.remove();
setStatus(rootModule, status, multi);
} else {
// replacement was not possible - redeploy the module
}
}
}
}
}
// phase 2: re-deploy the modules
boolean refreshOSGiBundle = getServerDelegate().isRefreshOSGiBundle();
for (ModuleDeltaList moduleList : publishMap.values()) {
IModule[] rootModule = moduleList.getRootModule();
AbstractName ebaName = null;
if (refreshOSGiBundle &&
GeronimoUtils.isEBAModule(rootModule[0]) &&
moduleList.hasChangedChildModulesOnly(false) &&
(ebaName = getApplicationGBeanName(rootModule[0])) != null) {
List<IModule[]> changedModules = new ArrayList<IModule[]>();
List<IModule[]> unChangedModules = new ArrayList<IModule[]>();
for (ModuleDelta moduleDelta : moduleList.getChildModules()) {
if (moduleDelta.delta == CHANGED) {
changedModules.add(moduleDelta.module);
} else {
unChangedModules.add(moduleDelta.module);
}
}
status = refreshBundles(rootModule[0], ebaName, changedModules, ProgressUtil.getSubMonitorFor(monitor, 3000));
if (status != null && !status.isOK()) {
multi.add(status);
}
unChangedModules.add(rootModule);
for (IModule[] module : unChangedModules) {
setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
setModuleStatus(module, null);
}
} else {
status = publishModule(kind, rootModule, moduleList.getEffectiveRootDelta(), ProgressUtil.getSubMonitorFor(monitor, 3000));
if (status != null && !status.isOK()) {
multi.add(status);
} else {
for (ModuleDelta moduleDelta : moduleList.getChildModules()) {
setModulePublishState(moduleDelta.module, IServer.PUBLISH_STATE_NONE);
setModuleStatus(moduleDelta.module, null);
}
}
}
}
} else {
multi.add(status);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishModules");
}
private AbstractName getApplicationGBeanName(IModule ebaModule) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.getApplicationGBeanName", ebaModule);
IServer server = getServer();
AbstractName ebaName = null;
try {
String configId = DeploymentUtils.getConfigId(server, ebaModule);
ExtendedDeploymentManager dm = (ExtendedDeploymentManager) DeploymentCommandFactory.getDeploymentManager(server);
ebaName = dm.getApplicationGBeanName(Artifact.create(configId));
} catch (CoreException e) {
Trace.trace(Trace.WARNING, "Error getting gbean name", e, Activator.traceCore);
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getApplicationGBeanName", ebaName);
return ebaName;
}
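/*
* Refreshes the changed bundles of a deployed EBA in place, mapping each bundle module to its bundle id by symbolic name.
*/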
private IStatus refreshBundles(IModule ebaModule, AbstractName ebaName, List<IModule[]> bundleModules, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundles", ebaModule, ebaName, bundleModules, monitor);
if (monitor.isCanceled()) {
return Status.CANCEL_STATUS;
}
MultiStatus multiStatus = new MultiStatus(Activator.PLUGIN_ID, 0, "", null);
try {
ExtendedDeploymentManager dm = (ExtendedDeploymentManager) DeploymentCommandFactory.getDeploymentManager(getServer());
long[] bundleIds = dm.getEBAContentBundleIds(ebaName);
Map<String, Long> bundleMap = new HashMap<String, Long>();
for (long bundleId : bundleIds) {
String symbolicName = dm.getEBAContentBundleSymbolicName(ebaName, bundleId);
if (symbolicName != null) {
bundleMap.put(symbolicName, bundleId);
}
}
for (IModule[] bundleModule : bundleModules) {
if (monitor.isCanceled()) {
return Status.CANCEL_STATUS;
}
IStatus status = refreshBundle(ebaModule, bundleModule[1], ebaName, bundleMap);
setStatus(bundleModule, status, multiStatus);
}
} catch (Exception e) {
multiStatus.add(new Status(IStatus.ERROR, Activator.PLUGIN_ID, Messages.REFRESH_FAIL, e));
}
IStatus status;
if (multiStatus.isOK()) {
status = Status.OK_STATUS;
} else {
status = multiStatus;
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundles", status);
return status;
}
private IStatus refreshBundle(IModule ebaModule, IModule bundleModule, AbstractName ebaName, Map<String, Long> bundleMap) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundle", ebaModule, bundleModule, ebaName, bundleMap);
try {
String symbolicName = AriesHelper.getSymbolicName(bundleModule);
Long bundleId = bundleMap.get(symbolicName);
if (bundleId == null) {
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, Messages.bind(Messages.REFRESH_NO_BUNDLE_FAIL,
new String[] {bundleModule.getProject().getName(), ebaModule.getProject().getName()}));
}
ExtendedDeploymentManager dm = (ExtendedDeploymentManager) DeploymentCommandFactory.getDeploymentManager(getServer());
/*
* Try class hot swap first and if it fails fallback to regular bundle update.
*/
if (!refreshBundleClasses(dm, ebaModule, bundleModule, ebaName, bundleId)) {
File file = DeploymentUtils.getTargetFile(getServer(), bundleModule);
dm.updateEBAContent(ebaName, bundleId, file);
}
} catch (Exception e) {
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, Messages.REFRESH_FAIL, e);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundle");
return Status.OK_STATUS;
}
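/*
* Attempts class hot swap for a bundle: only proceeds when the server supports class redefinition and only
* class resources have changed; returns false to signal that a regular bundle update is required.
*/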
private boolean refreshBundleClasses(ExtendedDeploymentManager dm, IModule ebaModule, IModule bundleModule, AbstractName ebaName, long bundleId) throws Exception {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", ebaModule, bundleModule, ebaName, bundleId);
// check if class hot swap is supported
if (!dm.isRedefineClassesSupported()) {
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Class redefinition is not supported");
return false;
}
// ensure only classes have changed
IModuleResourceDelta[] delta = getPublishedResourceDelta(new IModule[] { ebaModule, bundleModule });
IModuleResource[] classResources = DeploymentUtils.getChangedClassResources(delta);
if (classResources == null) {
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Non-class resource modifications found");
return false;
}
// create temp. zip with the changes
File changeSetFile = DeploymentUtils.createChangeSetFile(classResources);
if (changeSetFile == null) {
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Error creating file with resource modifications");
return false;
}
// get document base for the module if it is expanded
String documentBase = getServerDelegate().isNoRedeploy() ? getWebModuleDocumentBase(bundleModule) : null;
// see if the classes can be hot swapped - update archive if module is not expanded
if (!dm.hotSwapEBAContent(ebaName, bundleId, changeSetFile, documentBase == null)) {
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Bundle class hot swap cannot be preformed");
changeSetFile.delete();
return false;
} else {
changeSetFile.delete();
}
if (documentBase != null) {
PublishHelper publishHelper = new PublishHelper(getTempDirectory().toFile());
IStatus[] statusArray = publishHelper.publishFull(classResources, new Path(documentBase), null);
if (statusArray != null) {
// XXX: in case of an error should we return false to force full re-deploy?
for (IStatus status : statusArray) {
if (!status.isOK()) {
Trace.trace(Trace.WARNING, "Error publishing changes: " + status.getMessage(), status.getException(), Activator.traceCore);
}
}
}
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.refreshBundleClasses", "Bundle class hot swap was succesfully preformed", documentBase);
return true;
}
private static class ModuleDelta {
private final IModule[] module;
private int delta = NO_CHANGE;
public ModuleDelta(IModule[] module, int delta) {
this.module = module;
this.delta = delta;
}
}
private static class ModuleDeltaList {
private ModuleDelta root;
private List<ModuleDelta> children;
public ModuleDeltaList(IModule rootModule) {
this.root = new ModuleDelta(new IModule [] {rootModule}, NO_CHANGE);
this.children = new ArrayList<ModuleDelta>();
}
public IModule[] getRootModule() {
return root.module;
}
public int getEffectiveRootDelta() {
if (root.delta == NO_CHANGE) {
for (ModuleDelta child : children) {
if (child.delta == ADDED || child.delta == REMOVED || child.delta == CHANGED) {
return CHANGED;
}
}
}
return root.delta;
}
public void setRootModuleDelta(int moduleDelta) {
root.delta = moduleDelta;
}
public void addChildModule(IModule[] module, int moduleDelta) {
children.add(new ModuleDelta(module, moduleDelta));
}
public List<ModuleDelta> getChildModules() {
return children;
}
public boolean hasChangedChildModulesOnly(boolean allChangedAllowed) {
int changed = getChangedChildModulesOnly();
if (changed > 0) {
if (allChangedAllowed) {
return true;
} else {
if (children.size() == 1) {
// special case: always return true if module only has one child module
return true;
} else {
return (changed < children.size());
}
}
}
return false;
}
/*
* Returns the number of "changed" child modules.
* Returns -1 if any "added" or "removed" child module is found or the root module itself is modified.
*/
public int getChangedChildModulesOnly() {
if (root.delta == NO_CHANGE) {
int changed = 0;
for (ModuleDelta child : children) {
if (child.delta == ADDED || child.delta == REMOVED) {
return -1;
} else if (child.delta == CHANGED) {
changed++;
}
}
return changed;
}
return -1;
}
}
/*
* This method is used to invoke the DependencyHelper; subclasses may override it to use a version-specific DependencyHelper.
*/
protected List getOrderedModules(IServer server, List modules, List deltaKind) {
DependencyHelper dh = new DependencyHelper();
List list = dh.reorderModules(this.getServer(),modules, deltaKind);
return list;
}
/*
* (non-Javadoc)
*
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#publishModule(int,
* int, org.eclipse.wst.server.core.IModule[],
* org.eclipse.core.runtime.IProgressMonitor)
*/
public void publishModule(int kind, int deltaKind, IModule[] module, IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishModule", publishKindToString(kind), deltaKindToString(deltaKind), Arrays.asList(module), monitor);
try {
// NO_CHANGE is needed if the app is associated but not started and there is no delta
if (deltaKind == NO_CHANGE && module.length == 1) {
invokeCommand(deltaKind, module[0], monitor);
}
else if (deltaKind == CHANGED || deltaKind == ADDED || deltaKind == REMOVED) {
invokeCommand(deltaKind, module[0], monitor);
}
setModuleStatus(module, null);
setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
}
catch (CoreException e) {
//
// Set the parent module publish state to UNKNOWN so that WTP will display "Republish" instead of
// "Synchronized" for the server state, and set the module status to an error message
// for the GEP end-user to see.
//
setModuleStatus(module, new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Error publishing module to server"));
setModulePublishState(module, IServer.PUBLISH_STATE_UNKNOWN);
setModuleState(module, IServer.STATE_UNKNOWN);
throw e;
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishModule");
}
@Override
public void publishStart(IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishStart", monitor);
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishStart");
}
@Override
public void publishFinish(IProgressMonitor monitor) throws CoreException {
doPublishFinish(monitor);
}
private void doPublishFinish(IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishFinish", monitor);
IModule[] modules = this.getServer().getModules();
boolean allpublished = true;
for (int i = 0; i < modules.length; i++) {
int state = getServer().getModulePublishState(new IModule[] { modules[i] });
if (state != IServer.PUBLISH_STATE_NONE) {
allpublished = false;
break;
}
}
if (allpublished) {
setServerPublishState(IServer.PUBLISH_STATE_NONE);
setServerStatus(null);
} else {
setServerPublishState(IServer.PUBLISH_STATE_UNKNOWN);
setServerStatus(new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Error publishing one or more modules to server"));
}
GeronimoConnectionFactory.getInstance().destroy(getServer());
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.publishFinish");
}
/**
* Initializes the Geronimo server delegate. This method is called by the server core framework
* to give delegates a chance to do their own initialization. As such, the GEP proper should
* never call this method.
*
* @param monitor a progress monitor, or <code>null</code> if progress reporting and cancellation
* are not desired
*/
@Override
protected void initialize(IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.initialize", monitor);
publishStateListener = new PublishStateListener();
getServer().addServerListener(publishStateListener, ServerEvent.MODULE_CHANGE | ServerEvent.PUBLISH_STATE_CHANGE);
defaultModuleHandler = new DefaultModuleHandler(this);
osgiModuleHandler = new OSGiModuleHandler(this);
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.initialize");
}
/*
* GERONIMODEVTOOLS-715: Update parent module publish state to "publish" if a child
* publish state was changed to "publish". This is because GEP right now is redeploying the
* entire application instead of the individual bundle/module that has changed. Once that is
* supported this listener can be removed.
*/
private class PublishStateListener implements IServerListener {
public void serverChanged(ServerEvent event) {
if (event.getPublishState() == IServer.PUBLISH_STATE_INCREMENTAL ||
event.getPublishState() == IServer.PUBLISH_STATE_FULL) {
// reset server status in case it was set
setServerStatus(null);
IModule[] modules = event.getModule();
if (modules.length > 1) {
if (getServer().getServerState() == IServer.STATE_STARTED) {
setModulePublishState(event.getModule(), IServer.PUBLISH_STATE_NONE);
setModuleStatus(event.getModule(), new Status(IStatus.OK, Activator.PLUGIN_ID, Messages.moduleModified));
} else {
setModulePublishState(event.getModule(), IServer.PUBLISH_STATE_UNKNOWN);
setModuleStatus(event.getModule(), null);
}
IModule[] newModules = new IModule[modules.length - 1];
System.arraycopy(modules, 0, newModules, 0, newModules.length);
// update parent module publish state to "publish"
setModulePublishState(newModules, event.getPublishState());
// reset parent module status message
setModuleStatus(newModules, null);
}
}
}
}
/*
* (non-Javadoc)
*
* @see org.eclipse.wst.server.core.model.ServerBehaviourDelegate#dispose()
*/
public void dispose() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.dispose");
stopUpdateServerStateTask();
stopSynchronizeProjectOnServerTask();
if (publishStateListener != null) {
getServer().removeServerListener(publishStateListener);
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.dispose");
}
public String getRuntimeClass() {
return "org.apache.geronimo.cli.daemon.DaemonCLI";
}
public void setServerStarted() {
setServerState(IServer.STATE_STARTED);
GeronimoConnectionFactory.getInstance().destroy(getServer());
startSynchronizeProjectOnServerTask();
}
public void setServerStopped() {
setServerState(IServer.STATE_STOPPED);
resetModuleState();
stopSynchronizeProjectOnServerTask();
if (defaultModuleHandler != null) {
defaultModuleHandler.serverStopped();
}
if (osgiModuleHandler != null) {
osgiModuleHandler.serverStopped();
}
GeronimoConnectionFactory.getInstance().destroy(getServer());
}
public IGeronimoServer getGeronimoServer() {
return (IGeronimoServer) getServer().loadAdapter(IGeronimoServer.class, null);
}
protected void stopImpl() {
if (processListener != null) {
DebugPlugin.getDefault().removeDebugEventListener(processListener);
processListener = null;
}
setServerState(IServer.STATE_STOPPED);
resetModuleState();
}
private void resetModuleState() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.resetModuleState");
IModule[] modules = getServer().getModules();
for (int i = 0; i < modules.length; i++) {
IModule[] module = new IModule[] { modules[i] };
setModuleState(module, IServer.STATE_STOPPED);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.resetModuleState");
}
protected AbstractModuleHandler getModuleHandler(IModule module) {
return (GeronimoUtils.isBundleModule(module) || GeronimoUtils.isFragmentBundleModule(module)) ? osgiModuleHandler : defaultModuleHandler;
}
protected void invokeCommand(int deltaKind, IModule module, IProgressMonitor monitor) throws CoreException {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.invokeCommand", deltaKindToString(deltaKind), module.getName());
AbstractModuleHandler moduleHandler = getModuleHandler(module);
ClassLoader old = Thread.currentThread().getContextClassLoader();
try {
ClassLoader cl = getContextClassLoader();
if (cl != null)
Thread.currentThread().setContextClassLoader(cl);
switch (deltaKind) {
case ADDED: {
moduleHandler.doAdded(module, monitor);
break;
}
case CHANGED: {
moduleHandler.doChanged(module, monitor);
break;
}
case REMOVED: {
moduleHandler.doRemoved(module, monitor);
break;
}
case NO_CHANGE: {
moduleHandler.doNoChange(module, monitor);
break;
}
default:
throw new IllegalArgumentException();
}
} catch (CoreException e) {
throw e;
} catch (Exception e) {
e.printStackTrace();
} finally {
Thread.currentThread().setContextClassLoader(old);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.invokeCommand");
}
private String getWebModuleDocumentBase(IModule webModule) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.getWebModuleDocumentBase", webModule);
if (webModule.isExternal()) {
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getWebModuleDocumentBase", "External module");
return null;
}
String contextPath = getServerDelegate().getContextPath(webModule);
if (contextPath == null) {
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getWebModuleDocumentBase", "Context path is null");
return null;
}
String documentBase = getWebModuleDocumentBase(contextPath);
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getWebModuleDocumentBase", contextPath, documentBase);
return documentBase;
}
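/*
* Attempts to publish a changed module by copying modified files directly into its document base;
* returns null when in-place replacement is not possible and a full redeploy is required.
*/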
private IStatus tryFileReplace(IModule[] module) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.tryFileReplace", module.toString());
IModule webModule = module[module.length - 1];
String documentBase = getWebModuleDocumentBase(webModule);
if (documentBase == null ) {
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.tryFileReplace", "Document base is null");
return null;
}
List<IModuleResourceDelta> modifiedFiles = findModifiedFiles(module);
if (modifiedFiles == null) {
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.tryFileReplace", "Some modified files cannot be replaced");
return null;
}
Trace.trace(Trace.INFO, "Modified files: " + modifiedFiles, Activator.logCore);
IStatus status = findAndReplaceFiles(webModule, modifiedFiles, documentBase);
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.tryFileReplace", status);
return status;
}
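/*
* Returns the modified files that match the no-redeploy file patterns, or null if any other file changed.
*/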
private List<IModuleResourceDelta> findModifiedFiles(IModule[] module) {
IModuleResourceDelta[] deltaArray = getPublishedResourceDelta(module);
GeronimoServerDelegate delegate = getServerDelegate();
List<String> includes = delegate.getNoRedeployFilePatternsAsList(true);
List<String> excludes = delegate.getNoRedeployFilePatternsAsList(false);
List<IModuleResourceDelta> modifiedFiles = new ArrayList<IModuleResourceDelta>();
for (IModuleResourceDelta delta : deltaArray) {
List<IModuleResourceDelta> files = DeploymentUtils.getAffectedFiles(delta, includes, excludes);
// if null then some other files were changed that we cannot just copy/replace.
if (files == null) {
return null;
} else {
modifiedFiles.addAll(files);
}
}
return modifiedFiles;
}
/*
* This method is used to replace updated files without redeploying the entire module.
*/
private IStatus findAndReplaceFiles(IModule module, List<IModuleResourceDelta> modifiedFiles, String documentBase) {
Trace.trace(Trace.INFO, "Replacing updated files for " + module.getName() + " module.", Activator.logCore);
String ch = File.separator;
byte[] buffer = new byte[10 * 1024];
int bytesRead;
for (IModuleResourceDelta deltaModule : modifiedFiles) {
IModuleFile moduleFile = (IModuleFile) deltaModule.getModuleResource();
StringBuilder target = new StringBuilder(documentBase);
target.append(ch);
String relativePath = moduleFile.getModuleRelativePath().toOSString();
if (relativePath != null && relativePath.length() != 0) {
target.append(relativePath);
target.append(ch);
}
target.append(moduleFile.getName());
File file = new File(target.toString());
if (!file.isAbsolute()) {
file = getServerResource(IGeronimoServerBehavior.VAR_CATALINA_DIR + target.toString()).toFile();
}
switch (deltaModule.getKind()) {
case IModuleResourceDelta.REMOVED:
if (file.exists()) {
file.delete();
}
break;
case IModuleResourceDelta.ADDED:
case IModuleResourceDelta.CHANGED:
File parentFile = file.getParentFile();
if (parentFile != null && !parentFile.exists()) {
if (!parentFile.mkdirs()) {
Trace.trace(Trace.ERROR, "Cannot create target directory: " + parentFile, Activator.logCore);
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Cannot create target directory", null);
}
}
String sourceFile = relativePath;
InputStream in = null;
FileOutputStream out = null;
try {
IFile srcIFile = (IFile) moduleFile.getAdapter(IFile.class);
if (srcIFile != null) {
in = srcIFile.getContents();
} else {
File srcFile = (File) moduleFile.getAdapter(File.class);
in = new FileInputStream(srcFile);
}
out = new FileOutputStream(file);
while ((bytesRead = in.read(buffer)) > 0) {
out.write(buffer, 0, bytesRead);
}
} catch (FileNotFoundException e) {
Trace.trace(Trace.ERROR, "Cannot find file to copy: " + sourceFile, e, Activator.logCore);
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Cannot find file " + sourceFile, e);
} catch (IOException e) {
Trace.trace(Trace.ERROR, "Cannot copy file: " + sourceFile, e, Activator.logCore);
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Cannot copy file " + sourceFile, e);
} catch (CoreException e) {
Trace.trace(Trace.ERROR, "Cannot copy file: " + sourceFile, e, Activator.logCore);
return e.getStatus();
} finally {
if (in != null) {
try { in.close(); } catch (IOException ignore) {}
}
if (out != null) {
try { out.close(); } catch (IOException ignore) {}
}
}
break;
}
}
return Status.OK_STATUS;
}
public Map getServerInstanceProperties() {
return getRuntimeDelegate().getServerInstanceProperties();
}
protected GeronimoRuntimeDelegate getRuntimeDelegate() {
GeronimoRuntimeDelegate rd = (GeronimoRuntimeDelegate) getServer().getRuntime().getAdapter(GeronimoRuntimeDelegate.class);
if (rd == null)
rd = (GeronimoRuntimeDelegate) getServer().getRuntime().loadAdapter(GeronimoRuntimeDelegate.class, new NullProgressMonitor());
return rd;
}
protected GeronimoServerDelegate getServerDelegate() {
GeronimoServerDelegate sd = (GeronimoServerDelegate) getServer().getAdapter(GeronimoServerDelegate.class);
if (sd == null)
sd = (GeronimoServerDelegate) getServer().loadAdapter(GeronimoServerDelegate.class, new NullProgressMonitor());
return sd;
}
protected boolean isRemote() {
return getServer().getServerType().supportsRemoteHosts()
&& !SocketUtil.isLocalhost(getServer().getHost());
}
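/*
* Builds the launch classpath: the server jars (lib/*.jar for 3.x, bin/server.jar for earlier versions),
* the existing launch configuration classpath, and any selected classpath containers.
*/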
protected void setupLaunchClasspath(ILaunchConfigurationWorkingCopy wc, IVMInstall vmInstall) throws CoreException {
List<IRuntimeClasspathEntry> cp = new ArrayList<IRuntimeClasspathEntry>();
String version = getServer().getRuntime().getRuntimeType().getVersion();
if (version.startsWith("3")) {
//get required jar file
IPath libPath = getServer().getRuntime().getLocation().append("/lib");
for (String jarFile : libPath.toFile().list()) {
IPath serverJar = libPath.append("/"+jarFile);
cp.add(JavaRuntime.newArchiveRuntimeClasspathEntry(serverJar));
}
} else {
//for 1.1,2.0,2.1,2.2
IPath serverJar = getServer().getRuntime().getLocation().append("/bin/server.jar");
cp.add(JavaRuntime.newArchiveRuntimeClasspathEntry(serverJar));
}
// merge existing classpath with server classpath
IRuntimeClasspathEntry[] existingCps = JavaRuntime.computeUnresolvedRuntimeClasspath(wc);
for (int i = 0; i < existingCps.length; i++) {
Trace.trace(Trace.INFO, "cpentry: " + cp , Activator.traceCore);
if (cp.contains(existingCps[i]) == false) {
cp.add(existingCps[i]);
}
}
//
// Add classpath entries from any selected classpath containers
//
if ( getGeronimoServer().isSelectClasspathContainers()) {
List<String> containers = getGeronimoServer().getClasspathContainers();
for ( String containerPath : containers ) {
List<IClasspathEntry> cpes = ClasspathContainersHelper.queryWorkspace( containerPath );
for ( IClasspathEntry cpe : cpes ) {
RuntimeClasspathEntry rcpe = new RuntimeClasspathEntry( cpe );
Trace.trace(Trace.INFO, "Classpath Container Entry: " + rcpe , Activator.traceCore);
if (cp.contains(rcpe) == false) {
cp.add( rcpe );
}
}
}
}
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH, convertCPEntryToMemento(cp));
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);
}
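/*
* Converts runtime classpath entries to their memento form for storage in the launch configuration.
*/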
private List<String> convertCPEntryToMemento(List<IRuntimeClasspathEntry> cpEntryList) {
List<String> list = new ArrayList<String>(cpEntryList.size());
Iterator<IRuntimeClasspathEntry> iterator = cpEntryList.iterator();
while (iterator.hasNext()) {
IRuntimeClasspathEntry entry = iterator.next();
try {
list.add(entry.getMemento());
} catch (CoreException e) {
Trace.trace(Trace.ERROR, "Could not resolve classpath entry: "
+ entry, e, Activator.logCore);
}
}
return list;
}
public void setProcess(final IProcess newProcess) {
if (newProcess == null) {
return;
}
if (processListener != null) {
DebugPlugin.getDefault().removeDebugEventListener(processListener);
}
processListener = new IDebugEventSetListener() {
public void handleDebugEvents(DebugEvent[] events) {
if (events != null) {
int size = events.length;
for (int i = 0; i < size; i++) {
if (newProcess.equals(events[i].getSource()) && events[i].getKind() == DebugEvent.TERMINATE) {
stopImpl();
}
}
}
}
};
DebugPlugin.getDefault().addDebugEventListener(processListener);
}
protected void startPingThread() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.startPingThread");
pingThread = new PingThread(this, getServer());
pingThread.start();
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.startPingThread");
}
protected void stopPingThread() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopPingThread");
if (pingThread != null) {
pingThread.interrupt();
pingThread = null;
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopPingThread");
}
protected Kernel getKernel() throws SecurityException {
if (kernel == null) {
try {
MBeanServerConnection connection = getServerConnection();
if (connection != null)
kernel = new KernelDelegate(connection);
} catch (SecurityException e) {
throw e;
} catch (Exception e) {
Trace.trace(Trace.INFO, "Kernel connection failed. "
+ e.getMessage(), Activator.traceCore);
}
}
return kernel;
}
private void stopKernel() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopKernel");
try {
MBeanServerConnection connection = getServerConnection();
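// stopping bundle 0 (the OSGi system bundle) shuts down the framework and with it the server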
connection.invoke(getFrameworkMBean(connection), "stopBundle",
new Object[] { 0L }, new String[] { long.class.getName() });
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error while requesting server shutdown", e, Activator.traceCore);
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopKernel");
}
public boolean isKernelAlive() {
try {
return getKernel() != null && kernel.isRunning();
} catch (SecurityException e) {
Trace.trace(Trace.ERROR, "Invalid username and/or password.", e, Activator.logCore);
pingThread.interrupt();
if (getServer().getServerState() != IServer.STATE_STOPPED) {
forceStopJob(true,e);
}
} catch (Exception e) {
Trace.trace(Trace.WARNING, "Geronimo Server may have been terminated manually outside of workspace.", e, Activator.logCore);
kernel = null;
}
return false;
}
private void forceStopJob(boolean b, final SecurityException e) {
/*
*
* Currently, another Status is returned by StartJob in Server, and its message does not
* contain the reason for the exception. This job is created to show the actual reason
* (invalid username and/or password) to the user.
*
* TODO: Need a method to remove the error message thrown by StartJob in Server.
*
*/
String jobName = NLS.bind(org.eclipse.wst.server.core.internal.Messages.errorStartFailed, getServer().getName());
// This message has different variable names in WTP 3.0 and 3.1, so we define it here instead of using the one defined in WTP
final String jobStartingName = NLS.bind("Starting {0}", getServer().getName());
new Job(jobName) {
@Override
protected IStatus run(IProgressMonitor monitor) {
MultiStatus multiStatus = new MultiStatus(Activator.PLUGIN_ID, 0, jobStartingName, null);
multiStatus.add(new Status(IStatus.ERROR,Activator.PLUGIN_ID,0,"Invalid username and/or password.",e));
try {
stop(true);
} catch (Exception e){
multiStatus.add(new Status(IStatus.ERROR,Activator.PLUGIN_ID,0,"Failed to stop server",e));
}
return multiStatus;
}
}.schedule();
}
public boolean isFullyStarted() {
if (isKernelAlive()) {
AbstractNameQuery query = new AbstractNameQuery(PersistentConfigurationList.class.getName());
Set<AbstractName> configLists = kernel.listGBeans(query);
if (!configLists.isEmpty()) {
AbstractName on = (AbstractName) configLists.toArray()[0];
try {
Boolean b = (Boolean) kernel.getAttribute(on, "kernelFullyStarted");
return b.booleanValue();
} catch (GBeanNotFoundException e) {
// ignore
} catch (NoSuchAttributeException e) {
// ignore
} catch (Exception e) {
e.printStackTrace();
}
} else {
Trace.trace(Trace.INFO, "configLists is empty", Activator.traceCore);
}
}
return false;
}
@Override
public IPath getServerResource(String path) {
IPath serverRoot = getServer().getRuntime().getLocation();
return serverRoot.append(path);
}
public void startUpdateServerStateTask() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.startUpdateServerStateTask", getServer().getName());
stateTimer = new Timer(true);
stateTimer.schedule(new UpdateServerStateTask(this, getServer()), 0, TIMER_TASK_INTERVAL * 1000);
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.startUpdateServerStateTask");
}
public void startSynchronizeProjectOnServerTask() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.startSynchronizeProjectOnServerTask", getServer().getName());
if (synchronizerTask != null) {
synchronizerTask.stop();
}
synchronizerTask = new SynchronizeProjectOnServerTask(this, getServer());
synchronizerTask.start();
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.startSynchronizeProjectOnServerTask");
}
public void stopUpdateServerStateTask() {
Trace.tracePoint("Entry", "GeronimoServerBehaviourDelegate.stopUpdateServerStateTask", Activator.traceCore);
if (stateTimer != null) {
stateTimer.cancel();
stateTimer = null;
}
Trace.tracePoint("Exit ", "GeronimoServerBehaviourDelegate.stopUpdateServerStateTask", Activator.traceCore);
}
public void stopSynchronizeProjectOnServerTask() {
Trace.tracePoint("Entry", "GeronimoServerBehaviourDelegate.stopSynchronizeProjectOnServerTask", Activator.traceCore);
if (synchronizerTask != null) {
synchronizerTask.stop();
synchronizerTask = null;
}
Trace.tracePoint("Exit ", "GeronimoServerBehaviourDelegate.stopSynchronizeProjectOnServerTask", Activator.traceCore);
}
protected IPath getModulePath(IModule[] module, URL baseURL) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.getModulePath", Arrays.asList(module), baseURL);
IPath modulePath = new Path(baseURL.getFile());
if (module.length == 2) {
IModule workingModule = module[module.length - 1];
modulePath = modulePath.append(workingModule.getName());
if (GeronimoUtils.isWebModule(workingModule)) {
modulePath = modulePath.addFileExtension("war");
} else if (GeronimoUtils.isEjbJarModule(workingModule)) {
modulePath = modulePath.addFileExtension("jar");
} else if (GeronimoUtils.isRARModule(workingModule)) {
modulePath = modulePath.addFileExtension("rar");
} else if (GeronimoUtils.isEarModule(workingModule)) {
modulePath = modulePath.addFileExtension("ear");
} else if (GeronimoUtils.isAppClientModule(workingModule)) {
modulePath = modulePath.addFileExtension("jar");
}
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.getModulePath", modulePath);
return modulePath;
}
private Kernel getDeploymentManagerKernel() {
GeronimoConnectionFactory connectionFactory = GeronimoConnectionFactory.getInstance();
try {
JMXDeploymentManager manager =
(JMXDeploymentManager) connectionFactory.getDeploymentManager(getServer());
return manager.getKernel();
} catch (DeploymentManagerCreationException e) {
Trace.trace(Trace.WARNING, "Error getting kernel from deployment manager", e, Activator.logCore);
return null;
}
}
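/*
* Opens a JMX connection to the server using the admin credentials and RMI naming port configured for this server.
*/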
public MBeanServerConnection getServerConnection() throws Exception {
String host = getServer().getHost();
String user = getGeronimoServer().getAdminID();
String password = getGeronimoServer().getAdminPassword();
String port = getGeronimoServer().getRMINamingPort();
JMXConnectorInfo connectorInfo = new JMXConnectorInfo(user, password, host, port);
// Use the classloader that loaded this class as the default classloader when creating the JMXConnector
JMXConnector jmxConnector = GeronimoJMXConnectorFactory.create(connectorInfo, this.getClass().getClassLoader());
return jmxConnector.getMBeanServerConnection();
}
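/**
 * Queries the connection for an MBean matching the given object-name pattern and returns it,
 * failing if no match or more than one match is found.
 */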
public ObjectName getMBean(MBeanServerConnection connection, String mbeanName, String name) throws Exception {
Set<ObjectName> objectNameSet =
connection.queryNames(new ObjectName(mbeanName), null);
if (objectNameSet.isEmpty()) {
throw new Exception(Messages.bind(Messages.mBeanNotFound, name));
} else if (objectNameSet.size() == 1) {
return objectNameSet.iterator().next();
} else {
throw new Exception(Messages.bind(Messages.multipleMBeansFound, name));
}
}
private ObjectName getFrameworkMBean(MBeanServerConnection connection) throws Exception {
return getMBean(connection, "osgi.core:type=framework,*", "Framework");
}
public Target[] getTargets() {
return null;
}
public static String deltaKindToString(int kind) {
switch(kind) {
case NO_CHANGE:
return "NO_CHANGE";
case ADDED:
return "ADDED";
case CHANGED:
return "CHANGED";
case REMOVED:
return "REMOVED";
}
return Integer.toString(kind);
}
public static String publishKindToString(int kind) {
switch(kind) {
case IServer.PUBLISH_AUTO:
return "Auto";
case IServer.PUBLISH_CLEAN:
return "Clean";
case IServer.PUBLISH_FULL:
return "Full";
case IServer.PUBLISH_INCREMENTAL:
return "Incremental";
}
return Integer.toString(kind);
}
public String getConfigId(IModule module) throws Exception {
return getGeronimoServer().getVersionHandler().getConfigID(module);
}
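/**
 * Runs the shared-library entry creation operation for the given modules against this server,
 * returning an error status if the operation fails.
 */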
private IStatus updateSharedLib(IModule[] module, IProgressMonitor monitor) {
IDataModel model = DataModelFactory.createDataModel(new SharedLibEntryDataModelProvider());
model.setProperty(ISharedLibEntryCreationDataModelProperties.MODULES, module);
model.setProperty(ISharedLibEntryCreationDataModelProperties.SERVER, getServer());
IDataModelOperation op = new SharedLibEntryCreationOperation(model);
try {
op.execute(monitor, null);
} catch (ExecutionException e) {
return new Status(IStatus.ERROR, Activator.PLUGIN_ID, 0, e.getMessage(), e.getCause());
}
return Status.OK_STATUS;
}
@Override
public void startModule(IModule[] module, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.startModule", Arrays.asList(module));
try {
getModuleHandler(module[0]).doStartModule(module, monitor);
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error starting module " + module[0].getName(), e, Activator.logCore);
throw new RuntimeException("Error starting module " + module[0].getName(), e);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.startModule");
}
@Override
public void stopModule(IModule[] module, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopModule", Arrays.asList(module));
try {
getModuleHandler(module[0]).doStopModule(module, monitor);
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error stopping module " + module[0].getName(), e, Activator.logCore);
throw new RuntimeException("Error stopping module " + module[0].getName(), e);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.stopModule");
}
@Override
public void restartModule(IModule[] module, IProgressMonitor monitor) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.restartModule", Arrays.asList(module));
try {
getModuleHandler(module[0]).doRestartModule(module, monitor);
} catch (Exception e) {
Trace.trace(Trace.ERROR, "Error restarting module " + module[0].getName(), e, Activator.logCore);
throw new RuntimeException("Error restarting module " + module[0].getName(), e);
}
Trace.tracePoint("Exit ", Activator.traceCore, "GeronimoServerBehaviourDelegate.restartModule");
}
@Override
public boolean canControlModule(IModule[] module) {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.canControlModule", Arrays.asList(module));
// Enable start/stop for top-level modules only
if (module.length == 1) {
if (GeronimoUtils.isFragmentBundleModule(module[0])) {
// fragment bundles cannot be started/stopped
return false;
} else {
return true;
}
} else {
return false;
}
}
public void setModulesState(IModule[] module, int state) {
setModuleState(module, state);
}
public IPath getPublishDirectory(IModule[] module) {
if (module == null || module.length == 0)
return null;
if (getGeronimoServer().isRunFromWorkspace()) {
// TODO fix me, see if project root, component root, or output
// container should be returned
return module[module.length - 1].getProject().getLocation();
} else {
ClassLoader old = Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(getContextClassLoader());
String configId = getConfigId(module[0]);
Artifact artifact = Artifact.create(configId);
AbstractName name = Configuration.getConfigurationAbstractName(artifact);
GBeanData data = kernel.getGBeanData(name);
URL url = (URL) data.getAttribute("baseURL");
return getModulePath(module, url);
} catch (InvalidConfigException e) {
e.printStackTrace();
} catch (GBeanNotFoundException e) {
e.printStackTrace();
} catch (InternalKernelException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
} finally {
Thread.currentThread().setContextClassLoader(old);
}
}
return null;
}
// TODO: this can be cached
public String getWebModuleDocumentBase(String contextPath) {
Kernel kernel = getDeploymentManagerKernel();
if (kernel == null) {
Trace.trace(Trace.WARNING, "Error getting web module document base - no kernel", null, Activator.logCore);
return null;
}
Map<String, String> map = Collections.singletonMap("j2eeType", "WebModule");
if (!contextPath.startsWith("/")) {
contextPath = "/" + contextPath;
}
AbstractNameQuery query = new AbstractNameQuery(null, map, Collections.EMPTY_SET);
Set<AbstractName> webModuleNames = kernel.listGBeans(query);
for (AbstractName name : webModuleNames) {
try {
String moduleContextPath = (String) kernel.getAttribute(name, "contextPath");
if (contextPath.equals(moduleContextPath)) {
String docBase = (String) kernel.getAttribute(name, "docBase");
return (docBase != null && docBase.length() > 0) ? docBase : null;
}
} catch (GBeanNotFoundException e) {
// ignore
} catch (NoSuchAttributeException e) {
// ignore
} catch (Exception e) {
Trace.trace(Trace.WARNING, "Error getting web module document base", e, Activator.logCore);
}
}
return null;
}
public OSGiModuleHandler getOsgiModuleHandler() {
return osgiModuleHandler;
}
public boolean isPublished(IModule[] module) {
return super.hasBeenPublished(module);
}
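/**
 * Returns true if the given root module, or any of its child modules, has a non-empty
 * published-resource delta, i.e. its content has changed since the last publish.
 */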
public boolean hasChanged(IModule rootModule) {
IModule[] module = new IModule [] { rootModule };
IModuleResourceDelta[] deltaArray = getPublishedResourceDelta(module);
if (deltaArray != null && deltaArray.length > 0) {
return true;
}
IModule[] childModules = getServerDelegate().getChildModules(module);
if (childModules != null) {
for (IModule childModule : childModules) {
deltaArray = getPublishedResourceDelta(new IModule[] {rootModule, childModule});
if (deltaArray != null && deltaArray.length > 0) {
return true;
}
}
}
return false;
}
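/**
 * Collects the configuration IDs of all top-level modules that have already been published to
 * this server and have changed since the last publish.
 */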
public Set<String> getModifiedConfigIds() {
Trace.tracePoint("Entry", Activator.traceCore, "GeronimoServerBehaviourDelegate.getModifiedConfigIds");
IServer server = getServer();
Set<String> configIds = new HashSet<String>();
IModule[] modules = server.getModules();
if (modules != null) {
for (IModule module : modules) {
IModule[] rootModule = new IModule[] { module };
// only consider modules that have been published and have changed
if (isPublished(rootModule) && hasChanged(module)) {
try {
String configId = DeploymentUtils.getConfigId(server, module);
configIds.add(configId);
} catch (CoreException e) {
// ignore
}
}
}
}
Trace.tracePoint("Exit", Activator.traceCore, "GeronimoServerBehaviourDelegate.getModifiedConfigIds", configIds);
return configIds;
}
}
| trace module delta info
git-svn-id: 7b63602214189898ed4bdcfe832e41633e363f7d@1303413 13f79535-47bb-0310-9956-ffa450edef68
| plugins/org.apache.geronimo.st.v30.core/src/main/java/org/apache/geronimo/st/v30/core/GeronimoServerBehaviourDelegate.java | trace module delta info |
|
Java | apache-2.0 | bc5831529603b208a3a90a0d3dc5d6b64c5f8562 | 0 | batfish/batfish,intentionet/batfish,dhalperi/batfish,arifogel/batfish,dhalperi/batfish,arifogel/batfish,intentionet/batfish,intentionet/batfish,intentionet/batfish,batfish/batfish,batfish/batfish,intentionet/batfish,dhalperi/batfish,arifogel/batfish | package org.batfish.datamodel;
import static org.batfish.datamodel.ForwardingAnalysisImpl.computeInterfaceHostSubnetIps;
import static org.batfish.datamodel.ForwardingAnalysisImpl.computeMatchingIps;
import static org.batfish.datamodel.matchers.AclIpSpaceMatchers.hasLines;
import static org.batfish.datamodel.matchers.AclIpSpaceMatchers.isAclIpSpaceThat;
import static org.batfish.datamodel.matchers.IpSpaceMatchers.containsIp;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import org.batfish.common.topology.TopologyUtil;
import org.junit.Before;
import org.junit.Test;
public class ForwardingAnalysisImplTest {
private static final String CONFIG1 = "config1";
private static final String VRF1 = "vrf1";
private static final String INTERFACE1 = "interface1";
private static final String INTERFACE2 = "interface2";
private static final IpSpace IPSPACE1 = new MockIpSpace(1);
private static final IpSpace IPSPACE2 = new MockIpSpace(2);
private static final Prefix P1 = Prefix.parse("1.0.0.0/8");
private static final Prefix P2 = Prefix.parse("2.0.0.0/16");
private static final Prefix P3 = Prefix.parse("3.0.0.0/24");
private Map<String, Map<String, IpSpace>> _arpReplies;
private Map<Edge, IpSpace> _arpTrueEdge;
private Map<Edge, IpSpace> _arpTrueEdgeDestIp;
private Map<Edge, IpSpace> _arpTrueEdgeNextHopIp;
private Configuration.Builder _cb;
private Interface.Builder _ib;
private Map<String, Map<String, Set<Ip>>> _interfaceOwnedIps = ImmutableMap.of();
private Map<String, Map<String, IpSpace>> _ipsRoutedOutInterfaces;
private Map<String, Map<String, Map<String, IpSpace>>> _neighborUnreachableOrExitsNetwork;
private Map<String, Map<String, Map<String, IpSpace>>> _arpFalseDestIp;
private Map<String, Map<String, Map<String, IpSpace>>> _arpFalseNextHopIp;
private NetworkFactory _nf;
private Map<String, Map<String, IpSpace>> _nullRoutedIps;
private Map<String, Map<String, IpSpace>> _routableIps;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>> _routesWhereDstIpCanBeArpIp;
private Map<Edge, Set<AbstractRoute>> _routesWithDestIpEdge;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>> _routesWithNextHop;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>> _routesWithNextHopIpArpFalse;
private Map<Edge, Set<AbstractRoute>> _routesWithNextHopIpArpTrue;
private Map<String, Map<String, IpSpace>> _someoneReplies;
private Map<String, Map<String, Map<String, IpSpace>>> _interfaceHostSubnetIps =
ImmutableMap.of();
private Map<String, Set<String>> _interfacesWithMissingDevices;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>>
_routesWithUnownedNextHopIpArpFalse;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>>
_routesWithOwnedNextHopIpArpFalse;
private Map<String, Map<String, Map<String, IpSpace>>> _dstIpsWithUnownedNextHopIpArpFalse;
private Map<String, Map<String, Map<String, IpSpace>>> _dstIpsWithOwnedNextHopIpArpFalse;
private Vrf.Builder _vb;
private IpSpace _internalIps = EmptyIpSpace.INSTANCE;
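/**
 * Builds a ForwardingAnalysisImpl directly from the precomputed intermediate maps above, so that
 * individual compute* steps can be exercised in isolation by the tests below.
 */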
private ForwardingAnalysisImpl initForwardingAnalysisImpl() {
return new ForwardingAnalysisImpl(
_arpReplies,
_arpTrueEdge,
_arpTrueEdgeDestIp,
_arpTrueEdgeNextHopIp,
_interfaceOwnedIps,
_ipsRoutedOutInterfaces,
_neighborUnreachableOrExitsNetwork,
_arpFalseDestIp,
_arpFalseNextHopIp,
_nullRoutedIps,
_routableIps,
_routesWhereDstIpCanBeArpIp,
_routesWithDestIpEdge,
_routesWithNextHop,
_routesWithNextHopIpArpFalse,
_routesWithNextHopIpArpTrue,
_someoneReplies,
_interfaceHostSubnetIps,
_interfacesWithMissingDevices,
_routesWithUnownedNextHopIpArpFalse,
_routesWithOwnedNextHopIpArpFalse,
_dstIpsWithUnownedNextHopIpArpFalse,
_dstIpsWithOwnedNextHopIpArpFalse,
_internalIps);
}
@Before
public void setup() {
_nf = new NetworkFactory();
_cb = _nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS);
_vb = _nf.vrfBuilder();
_ib = _nf.interfaceBuilder();
}
@Test
public void testComputeArpReplies() {
Configuration c1 = _cb.build();
Configuration c2 = _cb.build();
Map<String, Configuration> configs =
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2);
Vrf vrf1 = _vb.setOwner(c1).build();
Vrf vrf2 = _vb.setOwner(c2).build();
Interface i1 =
_ib.setOwner(c1)
.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.setProxyArp(true)
.build();
Interface i2 =
_ib.setOwner(c2)
.setVrf(vrf2)
.setAddress(new InterfaceAddress(P2.getStartIp(), P2.getPrefixLength()))
.setProxyArp(false)
.build();
IpSpace ipsRoutedOutI1 =
IpWildcardSetIpSpace.builder().including(new IpWildcard(P1), new IpWildcard(P3)).build();
IpSpace ipsRoutedOutI2 = IpWildcardSetIpSpace.builder().including(new IpWildcard(P2)).build();
Map<String, Configuration> configurations =
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1.getHostname(),
ImmutableSortedMap.of(
vrf1.getName(), MockRib.builder().setRoutableIps(UniverseIpSpace.INSTANCE).build()),
c2.getHostname(),
ImmutableSortedMap.of(
vrf2.getName(),
MockRib.builder().setRoutableIps(UniverseIpSpace.INSTANCE).build()));
_ipsRoutedOutInterfaces =
ImmutableMap.of(
c1.getHostname(),
ImmutableMap.of(i1.getName(), ipsRoutedOutI1),
c2.getHostname(),
ImmutableMap.of(i2.getName(), ipsRoutedOutI2));
_interfaceOwnedIps = TopologyUtil.computeInterfaceOwnedIps(configs, false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, IpSpace>> result =
forwardingAnalysisImpl.computeArpReplies(configurations, ribs);
/* Proxy-arp: Match interface IP, reject what's routed through i1, accept everything else*/
assertThat(
result,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(equalTo(i1.getName()), containsIp(P1.getStartIp()))));
assertThat(
result,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(equalTo(i1.getName()), not(containsIp(P1.getEndIp())))));
assertThat(
result,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(equalTo(i1.getName()), not(containsIp(P3.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(equalTo(i1.getName()), containsIp(P2.getStartIp()))));
/* No proxy-arp: just match interface ip*/
assertThat(
result,
hasEntry(
equalTo(c2.getHostname()),
hasEntry(equalTo(i2.getName()), containsIp(P2.getStartIp()))));
assertThat(
result,
hasEntry(
equalTo(c2.getHostname()),
hasEntry(equalTo(i2.getName()), not(containsIp(P2.getEndIp())))));
assertThat(
result,
hasEntry(
equalTo(c2.getHostname()),
hasEntry(equalTo(i2.getName()), not(containsIp(P3.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c2.getHostname()),
hasEntry(equalTo(i2.getName()), not(containsIp(P1.getStartIp())))));
}
@Test
public void testComputeArpRepliesByInterface() {
Configuration config = _cb.build();
_ib.setOwner(config);
_vb.setOwner(config);
Vrf vrf1 = _vb.build();
Vrf vrf2 = _vb.build();
Interface i1 =
_ib.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.setProxyArp(true)
.build();
Interface i2 =
_ib.setVrf(vrf2)
.setAddress(new InterfaceAddress(P2.getStartIp(), P2.getPrefixLength()))
.setProxyArp(false)
.build();
Interface i3 = _ib.setAddress(null).setProxyArp(true).build();
IpSpace ipsRoutedOutI1 =
IpWildcardSetIpSpace.builder().including(new IpWildcard(P1), new IpWildcard(P3)).build();
IpSpace ipsRoutedOutI2 = IpWildcardSetIpSpace.builder().including(new IpWildcard(P2)).build();
IpSpace ipsRoutedOutI3 = EmptyIpSpace.INSTANCE;
Map<String, Interface> interfaces =
ImmutableMap.of(i1.getName(), i1, i2.getName(), i2, i3.getName(), i3);
Map<String, IpSpace> routableIpsByVrf =
ImmutableMap.of(
vrf1.getName(), UniverseIpSpace.INSTANCE, vrf2.getName(), UniverseIpSpace.INSTANCE);
Map<String, IpSpace> ipsRoutedOutInterfaces =
ImmutableMap.of(
i1.getName(),
ipsRoutedOutI1,
i2.getName(),
ipsRoutedOutI2,
i3.getName(),
ipsRoutedOutI3);
Map<String, Configuration> configs = ImmutableMap.of(config.getHostname(), config);
_interfaceOwnedIps = TopologyUtil.computeInterfaceOwnedIps(configs, false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, IpSpace> result =
forwardingAnalysisImpl.computeArpRepliesByInterface(
interfaces, routableIpsByVrf, ipsRoutedOutInterfaces);
/* Proxy-arp: Match interface IP, reject what's routed through i1, accept everything else*/
assertThat(result, hasEntry(equalTo(i1.getName()), containsIp(P1.getStartIp())));
assertThat(result, hasEntry(equalTo(i1.getName()), not(containsIp(P1.getEndIp()))));
assertThat(result, hasEntry(equalTo(i1.getName()), not(containsIp(P3.getStartIp()))));
assertThat(result, hasEntry(equalTo(i1.getName()), containsIp(P2.getStartIp())));
/* No proxy-arp: just match interface ip*/
assertThat(result, hasEntry(equalTo(i2.getName()), containsIp(P2.getStartIp())));
assertThat(result, hasEntry(equalTo(i2.getName()), not(containsIp(P2.getEndIp()))));
assertThat(result, hasEntry(equalTo(i2.getName()), not(containsIp(P3.getStartIp()))));
assertThat(result, hasEntry(equalTo(i2.getName()), not(containsIp(P1.getStartIp()))));
/* No interface IPs: reject everything */
assertThat(result, hasEntry(equalTo(i3.getName()), equalTo(EmptyIpSpace.INSTANCE)));
}
@Test
public void testComputeArpReplies_VRRP() {
Configuration c = _cb.build();
Map<String, Configuration> configs = ImmutableMap.of(c.getHostname(), c);
_ib.setOwner(c);
Vrf vrf1 = _vb.build();
Vrf vrf2 = _vb.build();
Interface i1 =
_ib.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.setVrrpGroups(
ImmutableSortedMap.of(
1,
VrrpGroup.builder()
.setName(1)
.setPriority(100)
.setVirtualAddress(new InterfaceAddress("1.1.1.1/32"))
.build()))
.build();
Interface i2 =
_ib.setVrf(vrf2)
.setAddress(new InterfaceAddress(P1.getEndIp(), P1.getPrefixLength()))
.setVrrpGroups(
ImmutableSortedMap.of(
1,
VrrpGroup.builder()
.setName(1)
.setPriority(110)
.setVirtualAddress(new InterfaceAddress("1.1.1.1/32"))
.build()))
.build();
_interfaceOwnedIps = TopologyUtil.computeInterfaceOwnedIps(configs, false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
IpSpace p1IpSpace = new IpWildcard(P1).toIpSpace();
IpSpace i1ArpReplies =
forwardingAnalysisImpl.computeInterfaceArpReplies(i1, UniverseIpSpace.INSTANCE, p1IpSpace);
IpSpace i2ArpReplies =
forwardingAnalysisImpl.computeInterfaceArpReplies(i2, UniverseIpSpace.INSTANCE, p1IpSpace);
assertThat(i1ArpReplies, not(containsIp(Ip.parse("1.1.1.1"))));
assertThat(i2ArpReplies, containsIp(Ip.parse("1.1.1.1")));
}
@Test
public void testComputeArpTrueEdge() {
IpSpace nextHopIpSpace = new MockIpSpace(1);
IpSpace dstIpSpace = new MockIpSpace(2);
Edge e1 = Edge.of("c1", "i1", "c2", "i2");
_arpTrueEdgeDestIp = ImmutableMap.of(e1, dstIpSpace);
_arpTrueEdgeNextHopIp = ImmutableMap.of(e1, nextHopIpSpace);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, IpSpace> result = forwardingAnalysisImpl.computeArpTrueEdge();
assertThat(
result,
hasEntry(
equalTo(e1),
isAclIpSpaceThat(
hasLines(
containsInAnyOrder(
AclIpSpaceLine.permit(nextHopIpSpace),
AclIpSpaceLine.permit(dstIpSpace))))));
}
@Test
public void testComputeArpTrueEdgeDestIp() {
Configuration c1 = _cb.build();
Configuration c2 = _cb.build();
Vrf vrf1 = _vb.setOwner(c1).build();
Vrf vrf2 = _vb.setOwner(c2).build();
Interface i1 = _ib.setOwner(c1).setVrf(vrf1).build();
Ip i2Ip = Ip.create(P1.getStartIp().asLong() + 1);
Interface i2 = _ib.setOwner(c2).setVrf(vrf2).build();
Map<String, Configuration> configurations =
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1.getHostname(),
ImmutableSortedMap.of(
vrf1.getName(),
MockRib.builder()
.setMatchingIps(
ImmutableMap.of(
P1,
AclIpSpace.rejecting(
Prefix.create(P1.getEndIp(), Prefix.MAX_PREFIX_LENGTH)
.toIpSpace())
.thenPermitting(P1.toIpSpace())
.build()))
.build()));
Edge edge = Edge.of(c1.getHostname(), i1.getName(), c2.getHostname(), i2.getName());
_routesWithDestIpEdge =
ImmutableMap.of(edge, ImmutableSet.of(new ConnectedRoute(P1, i1.getName())));
_arpReplies =
ImmutableMap.of(
c2.getHostname(),
ImmutableMap.of(
i2.getName(),
AclIpSpace.permitting(i2Ip.toIpSpace())
.thenPermitting(P1.getEndIp().toIpSpace())
.build()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, IpSpace> result =
forwardingAnalysisImpl.computeArpTrueEdgeDestIp(configurations, computeMatchingIps(ribs));
/* Respond to request for IP on i2. */
assertThat(result, hasEntry(equalTo(edge), containsIp(i2Ip)));
/* Do not make ARP request for IP matched by more specific route not going out i1. */
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P1.getEndIp()))));
/* Do not receive response for IP i2 does not own. */
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P1.getStartIp()))));
}
@Test
public void testComputeArpTrueEdgeNextHopIp() {
Configuration c1 = _cb.build();
Configuration c2 = _cb.build();
Vrf vrf1 = _vb.setOwner(c1).build();
Vrf vrf2 = _vb.setOwner(c2).build();
Interface i1 =
_ib.setOwner(c1)
.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.build();
Ip i2Ip = Ip.create(P1.getStartIp().asLong() + 1);
Interface i2 =
_ib.setOwner(c2)
.setVrf(vrf2)
.setAddress(new InterfaceAddress(i2Ip, P1.getPrefixLength()))
.build();
Edge edge = Edge.of(c1.getHostname(), i1.getName(), c2.getHostname(), i2.getName());
Map<String, Configuration> configurations =
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1.getHostname(),
ImmutableSortedMap.of(
vrf1.getName(),
MockRib.builder()
.setMatchingIps(
ImmutableMap.of(
P1,
AclIpSpace.rejecting(
Prefix.create(P1.getEndIp(), Prefix.MAX_PREFIX_LENGTH)
.toIpSpace())
.thenPermitting(P1.toIpSpace())
.build()))
.build()));
_routesWithNextHopIpArpTrue =
ImmutableMap.of(
edge,
ImmutableSet.of(
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, IpSpace> result =
forwardingAnalysisImpl.computeArpTrueEdgeNextHopIp(
configurations, computeMatchingIps(ribs));
/*
* Respond for any destination IP in network not matching more specific route not going out i1.
*/
assertThat(result, hasEntry(equalTo(edge), containsIp(P1.getStartIp())));
assertThat(result, hasEntry(equalTo(edge), containsIp(i2Ip)));
/* Do not respond for destination IP matching more specific route not going out i1 */
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P1.getEndIp()))));
/* Do not respond for destination IPs not matching route */
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P2.getStartIp()))));
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P2.getEndIp()))));
}
@Test
public void testComputeInterfaceArpReplies() {
Configuration config = _cb.build();
_ib.setOwner(config);
InterfaceAddress primary = new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength());
InterfaceAddress secondary = new InterfaceAddress(P2.getStartIp(), P2.getPrefixLength());
Interface iNoProxyArp = _ib.setAddresses(primary, secondary).build();
Interface iProxyArp = _ib.setProxyArp(true).build();
IpSpace routableIpsForThisVrf = UniverseIpSpace.INSTANCE;
IpSpace ipsRoutedThroughInterface =
IpWildcardSetIpSpace.builder().including(new IpWildcard(P1), new IpWildcard(P2)).build();
_interfaceOwnedIps =
TopologyUtil.computeInterfaceOwnedIps(ImmutableMap.of(config.getHostname(), config), false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
IpSpace noProxyArpResult =
forwardingAnalysisImpl.computeInterfaceArpReplies(
iNoProxyArp, routableIpsForThisVrf, ipsRoutedThroughInterface);
IpSpace proxyArpResult =
forwardingAnalysisImpl.computeInterfaceArpReplies(
iProxyArp, routableIpsForThisVrf, ipsRoutedThroughInterface);
/* No proxy-ARP */
/* Accept IPs belonging to interface */
assertThat(noProxyArpResult, containsIp(P1.getStartIp()));
assertThat(noProxyArpResult, containsIp(P2.getStartIp()));
/* Reject all other IPs */
assertThat(noProxyArpResult, not(containsIp(P1.getEndIp())));
assertThat(noProxyArpResult, not(containsIp(P2.getEndIp())));
assertThat(noProxyArpResult, not(containsIp(P3.getStartIp())));
/* Proxy-ARP */
/* Accept IPs belonging to interface */
assertThat(proxyArpResult, containsIp(P1.getStartIp()));
assertThat(proxyArpResult, containsIp(P2.getStartIp()));
/* Reject IPs routed through interface */
assertThat(proxyArpResult, not(containsIp(P1.getEndIp())));
assertThat(proxyArpResult, not(containsIp(P2.getEndIp())));
/* Accept all other routable IPs */
assertThat(proxyArpResult, containsIp(P3.getStartIp()));
}
@Test
public void testComputeIpsAssignedToThisInterface() {
Configuration config = _cb.build();
Map<String, Configuration> configs = ImmutableMap.of(config.getHostname(), config);
_ib.setOwner(config);
InterfaceAddress primary = new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength());
InterfaceAddress secondary = new InterfaceAddress(P2.getStartIp(), P2.getPrefixLength());
Interface i = _ib.setAddresses(primary, secondary).build();
_interfaceOwnedIps = TopologyUtil.computeInterfaceOwnedIps(configs, false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
IpSpace result = forwardingAnalysisImpl.computeIpsAssignedToThisInterface(i);
assertThat(result, containsIp(P1.getStartIp()));
assertThat(result, containsIp(P2.getStartIp()));
assertThat(result, not(containsIp(P2.getEndIp())));
}
@Test
public void testComputeIpsRoutedOutInterfaces() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
ConnectedRoute r1 = new ConnectedRoute(P1, i1);
StaticRoute nullRoute =
StaticRoute.builder()
.setNetwork(P2)
.setNextHopInterface(Interface.NULL_INTERFACE_NAME)
.setAdministrativeCost(1)
.build();
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1,
MockRib.builder()
.setMatchingIps(ImmutableMap.of(P1, P1.toIpSpace(), P2, P2.toIpSpace()))
.build()));
_routesWithNextHop =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
ImmutableMap.of(
i1,
ImmutableSet.of(r1),
Interface.NULL_INTERFACE_NAME,
ImmutableSet.of(nullRoute))));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, IpSpace>> result =
forwardingAnalysisImpl.computeIpsRoutedOutInterfaces(computeMatchingIps(ribs));
/* Should contain IPs matching the route */
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), containsIp(P1.getStartIp()))));
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), containsIp(P1.getEndIp()))));
/* Should not contain IP not matching the route */
assertThat(
result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), not(containsIp(P2.getStartIp())))));
/* Null interface should be excluded because we would not be able to tie back to single VRF. */
assertThat(result, hasEntry(equalTo(c1), not(hasKey(equalTo(Interface.NULL_INTERFACE_NAME)))));
}
@Test
public void testComputeNeighborUnreachableOrExitsNetwork() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
_arpFalseDestIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, P1.getStartIp().toIpSpace())));
_arpFalseNextHopIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, P1.getEndIp().toIpSpace())));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeArpFalse();
/* Should contain both IPs. */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c1), hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getEndIp())))));
/* Should not contain unrelated IPs. */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P2.getEndIp()))))));
}
@Test
public void testComputeArpFalseDestIp() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute ifaceRoute = new ConnectedRoute(P1, i1);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1, MockRib.builder().setMatchingIps(ImmutableMap.of(P1, P1.toIpSpace())).build()));
_routesWhereDstIpCanBeArpIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(ifaceRoute))));
_someoneReplies = ImmutableMap.of(c1, ImmutableMap.of(i1, P1.getEndIp().toIpSpace()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeArpFalseDestIp(computeMatchingIps(ribs));
/* Should contain IP in the route's prefix that sees no reply */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getStartIp())))));
/* Should not contain IP in the route's prefix that sees reply */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P1.getEndIp()))))));
/* Should not contain other IPs */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P2.getEndIp()))))));
}
@Test
public void testComputeArpFalseDestIpNoNeighbors() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute ifaceRoute = new ConnectedRoute(P1, i1);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1, MockRib.builder().setMatchingIps(ImmutableMap.of(P1, P1.toIpSpace())).build()));
_routesWhereDstIpCanBeArpIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(ifaceRoute))));
_someoneReplies = ImmutableMap.of();
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeArpFalseDestIp(computeMatchingIps(ribs));
/*
* Since _someoneReplies is empty, all IPs for which longest-prefix-match route has no
* next-hop-ip should be in the result space.
*/
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c1), hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getEndIp())))));
/* Should not contain other IPs */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P2.getEndIp()))))));
}
@Test
public void testComputeNeighborUnreachableArpNextHopIp() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute r1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
_routesWithNextHop =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))));
_routesWithNextHopIpArpFalse =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))));
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1, MockRib.builder().setMatchingIps(ImmutableMap.of(P1, P1.toIpSpace())).build()));
/*
_interfaceHostSubnetIps =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, EmptyIpSpace.INSTANCE)));
*/
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeArpFalseNextHopIp(computeMatchingIps(ribs));
/* IPs matching some route on interface with no response should appear */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c1), hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getEndIp())))));
/* Other IPs should not appear */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P2.getStartIp()))))));
}
@Test
public void testComputeNullRoutedIps() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
GenericRib<AbstractRoute> rib1 =
MockRib.builder()
.setMatchingIps(
ImmutableMap.of(
P1,
AclIpSpace.permitting(P1.toIpSpace()).build(),
P2,
AclIpSpace.permitting(P2.toIpSpace()).build()))
.build();
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(c1, ImmutableSortedMap.of(v1, rib1));
AbstractRoute nullRoute =
StaticRoute.builder()
.setNextHopInterface(Interface.NULL_INTERFACE_NAME)
.setNetwork(P1)
.setAdministrativeCost(1)
.build();
AbstractRoute otherRoute = new ConnectedRoute(P2, i1);
Map<String, Map<String, Fib>> fibs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1,
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
nullRoute,
ImmutableMap.of(
Interface.NULL_INTERFACE_NAME,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(nullRoute))),
otherRoute,
ImmutableMap.of(
i1,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(otherRoute)))))
.build()));
Map<String, Map<String, IpSpace>> result =
ForwardingAnalysisImpl.computeNullRoutedIps(computeMatchingIps(ribs), fibs);
/* IPs for the null route should appear */
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(v1), containsIp(P1.getStartIp()))));
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(v1), containsIp(P1.getEndIp()))));
/* IPs for the non-null route should not appear */
assertThat(
result, hasEntry(equalTo(c1), hasEntry(equalTo(v1), not(containsIp(P2.getStartIp())))));
assertThat(
result, hasEntry(equalTo(c1), hasEntry(equalTo(v1), not(containsIp(P2.getEndIp())))));
}
/**
* The neighbor unreachable or exits network predicate map should not include an entry for null
* interface.
*/
@Test
public void testComputeNeighborUnreachableOrExitsNetwork_nullInterface() {
NetworkFactory nf = new NetworkFactory();
Configuration c =
nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();
Vrf v = nf.vrfBuilder().setOwner(c).build();
StaticRoute nullRoute =
StaticRoute.builder()
.setNetwork(Prefix.parse("1.0.0.0/8"))
.setNextHopInterface(Interface.NULL_INTERFACE_NAME)
.setAdministrativeCost(1)
.build();
IpSpace ipSpace = IpWildcardSetIpSpace.builder().including(new IpWildcard("1.0.0.0/8")).build();
v.setStaticRoutes(ImmutableSortedSet.of(nullRoute));
SortedMap<String, Configuration> configs = ImmutableSortedMap.of(c.getHostname(), c);
MockRib mockRib =
MockRib.builder()
.setRoutes(ImmutableSet.of(nullRoute))
.setRoutableIps(ipSpace)
.setMatchingIps(ImmutableMap.of(Prefix.parse("1.0.0.0/8"), ipSpace))
.build();
MockFib mockFib =
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
nullRoute,
ImmutableMap.of(
Interface.NULL_INTERFACE_NAME,
ImmutableMap.of(Ip.AUTO, ImmutableSet.of(nullRoute)))))
.setRoutesByNextHopInterface(
ImmutableMap.of(Interface.NULL_INTERFACE_NAME, ImmutableSet.of(nullRoute)))
.build();
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(c.getHostname(), ImmutableSortedMap.of(v.getName(), mockRib));
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(c.getHostname(), ImmutableMap.of(v.getName(), mockFib));
ForwardingAnalysisImpl forwardingAnalysisImpl =
new ForwardingAnalysisImpl(configs, ribs, fibs, new Topology(ImmutableSortedSet.of()));
Map<String, Map<String, Map<String, IpSpace>>> neighborUnreachableOrExitsNetwork =
forwardingAnalysisImpl.getNeighborUnreachableOrExitsNetwork();
assertThat(
neighborUnreachableOrExitsNetwork,
hasEntry(
equalTo(c.getHostname()),
hasEntry(equalTo(v.getName()), not(hasKey(Interface.NULL_INTERFACE_NAME)))));
}
@Test
public void testComputeRoutableIps() {
String c1 = "c1";
String v1 = "v1";
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1, ImmutableSortedMap.of(v1, MockRib.builder().setRoutableIps(IPSPACE1).build()));
Map<String, Map<String, IpSpace>> result = ForwardingAnalysisImpl.computeRoutableIps(ribs);
assertThat(result, equalTo(ImmutableMap.of(c1, ImmutableMap.of(v1, IPSPACE1))));
}
@Test
public void testComputeRouteMatchConditions() {
Set<AbstractRoute> routes =
ImmutableSet.of(new ConnectedRoute(P1, INTERFACE1), new ConnectedRoute(P2, INTERFACE2));
MockRib rib =
MockRib.builder().setMatchingIps(ImmutableMap.of(P1, IPSPACE1, P2, IPSPACE2)).build();
/* Resulting IP space should permit matching IPs */
assertThat(
ForwardingAnalysisImpl.computeRouteMatchConditions(routes, rib.getMatchingIps()),
isAclIpSpaceThat(
hasLines(
containsInAnyOrder(
AclIpSpaceLine.permit(IPSPACE1), AclIpSpaceLine.permit(IPSPACE2)))));
}
@Test
public void testComputeRoutesWhereDstIpCanBeArpIp() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute r1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
AbstractRoute ifaceRoute = new ConnectedRoute(P2, i1);
_routesWithNextHop =
ImmutableMap.of(
c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1, ifaceRoute))));
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
r1,
ImmutableMap.of(
i1,
ImmutableMap.of(r1.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
ifaceRoute,
ImmutableMap.of(
i1,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(ifaceRoute)))))
.build()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, Set<AbstractRoute>>>> result =
forwardingAnalysisImpl.computeRoutesWhereDstIpCanBeArpIp(fibs);
/* Only the interface route should show up */
assertThat(
result,
equalTo(
ImmutableMap.of(
c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(ifaceRoute))))));
}
@Test
public void testComputeRoutesWithDestIpEdge() {
String c1 = "c1";
String c2 = "c2";
String v1 = "v1";
String i1 = "i1";
String i2 = "i2";
AbstractRoute r1 = new ConnectedRoute(P1, i1);
_routesWhereDstIpCanBeArpIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))));
Edge e1 = Edge.of(c1, i1, c2, i2);
_arpReplies = ImmutableMap.of(c2, ImmutableMap.of(i2, P2.getStartIp().toIpSpace()));
Topology topology = new Topology(ImmutableSortedSet.of(e1));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, Set<AbstractRoute>> result =
forwardingAnalysisImpl.computeRoutesWithDestIpEdge(topology);
assertThat(result, equalTo(ImmutableMap.of(e1, ImmutableSet.of(r1))));
}
@Test
public void testComputeRoutesWithNextHop() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
ConnectedRoute r1 = new ConnectedRoute(P1, i1);
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
MockFib.builder()
.setRoutesByNextHopInterface(ImmutableMap.of(i1, ImmutableSet.of(r1)))
.build()));
Configuration config = _cb.setHostname(c1).build();
Vrf vrf = _vb.setName(v1).setOwner(config).build();
_ib.setName(i1).setVrf(vrf).setOwner(config).build();
Map<String, Map<String, Map<String, Set<AbstractRoute>>>> result =
ForwardingAnalysisImpl.computeRoutesWithNextHop(ImmutableMap.of(c1, config), fibs);
assertThat(
result,
equalTo(
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))))));
}
@Test
public void testComputeRoutesWithNextHopIpArpFalse() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute r1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
_routesWithNextHop =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))));
AbstractRoute ifaceRoute = new ConnectedRoute(P2, i1);
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
r1,
ImmutableMap.of(
i1,
ImmutableMap.of(r1.getNextHopIp(), ImmutableSet.of(ifaceRoute)))))
.build()));
_someoneReplies = ImmutableMap.of(c1, ImmutableMap.of(i1, P2.getEndIp().toIpSpace()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, Set<AbstractRoute>>>> result =
forwardingAnalysisImpl.computeRoutesWithNextHopIpArpFalse(fibs);
assertThat(
result,
equalTo(
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))))));
}
@Test
public void testComputeRoutesWithNextHopIpArpFalseForInterface() {
String hostname = "c1";
String outInterface = "i1";
AbstractRoute nextHopIpRoute1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
AbstractRoute nextHopIpRoute2 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getEndIp())
.setAdministrativeCost(1)
.build();
AbstractRoute ifaceRoute = new ConnectedRoute(P2, outInterface);
Set<AbstractRoute> candidateRoutes =
ImmutableSet.of(nextHopIpRoute1, nextHopIpRoute2, ifaceRoute);
_someoneReplies =
ImmutableMap.of(hostname, ImmutableMap.of(outInterface, P2.getStartIp().toIpSpace()));
Fib fib =
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
nextHopIpRoute1,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
nextHopIpRoute1.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
nextHopIpRoute2,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
nextHopIpRoute2.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
ifaceRoute,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(ifaceRoute)))))
.build();
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Set<AbstractRoute> result =
forwardingAnalysisImpl.computeRoutesWithNextHopIpArpFalseForInterface(
fib, hostname, outInterface, candidateRoutes);
/*
* Should only contain nextHopIpRoute2 since it is the only route with a next-hop-ip for which
* there is no ARP reply.
*/
assertThat(result, equalTo(ImmutableSet.of(nextHopIpRoute2)));
}
@Test
public void testComputeRoutesWithNextHopIpArpFalseForInterfaceNoNeighbors() {
String hostname = "c1";
String outInterface = "i1";
AbstractRoute nextHopIpRoute1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
AbstractRoute nextHopIpRoute2 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getEndIp())
.setAdministrativeCost(1)
.build();
AbstractRoute ifaceRoute = new ConnectedRoute(P2, outInterface);
Set<AbstractRoute> candidateRoutes =
ImmutableSet.of(nextHopIpRoute1, nextHopIpRoute2, ifaceRoute);
_someoneReplies = ImmutableMap.of();
Fib fib =
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
nextHopIpRoute1,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
nextHopIpRoute1.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
nextHopIpRoute2,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
nextHopIpRoute2.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
ifaceRoute,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(ifaceRoute)))))
.build();
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Set<AbstractRoute> result =
forwardingAnalysisImpl.computeRoutesWithNextHopIpArpFalseForInterface(
fib, hostname, outInterface, candidateRoutes);
/*
* Should contain both nextHopIpRoute1 and nextHopIpRoute2, since:
* 1) They are the only routes with a next hop IP.
* 2) Their next hop IPs do not receive ARP reply since _someoneReplies is empty.
*/
assertThat(result, equalTo(ImmutableSet.of(nextHopIpRoute1, nextHopIpRoute2)));
}
@Test
public void testComputeRoutesWithNextHopIpArpTrue() {
String c1 = "c1";
String i1 = "i1";
String c2 = "c2";
String i2 = "i2";
Edge e1 = Edge.of(c1, i1, c2, i2);
_arpReplies = ImmutableMap.of(c2, ImmutableMap.of(i2, P2.getStartIp().toIpSpace()));
Topology topology = new Topology(ImmutableSortedSet.of(e1));
String v1 = "v1";
AbstractRoute r1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
AbstractRoute r2 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getEndIp())
.setAdministrativeCost(1)
.build();
_routesWithNextHop =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1, r2))));
AbstractRoute ifaceRoute = new ConnectedRoute(P2, i1);
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
r1,
ImmutableMap.of(
i1,
ImmutableMap.of(r1.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
r2,
ImmutableMap.of(
i1,
ImmutableMap.of(r2.getNextHopIp(), ImmutableSet.of(ifaceRoute)))))
.build()));
_someoneReplies = ImmutableMap.of(c1, ImmutableMap.of(i1, P2.getEndIp().toIpSpace()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, Set<AbstractRoute>> result =
forwardingAnalysisImpl.computeRoutesWithNextHopIpArpTrue(fibs, topology);
/* Only the route with the next hop ip that gets a reply should be present. */
assertThat(result, equalTo(ImmutableMap.of(e1, ImmutableSet.of(r1))));
}
@Test
public void testComputeSomeoneReplies() {
String c1 = "c1";
String i1 = "i1";
String c2 = "c2";
String i2 = "i2";
Edge e1 = Edge.of(c1, i1, c2, i2);
_arpReplies = ImmutableMap.of(c2, ImmutableMap.of(i2, P1.toIpSpace()));
Topology topology = new Topology(ImmutableSortedSet.of(e1));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, IpSpace>> result =
forwardingAnalysisImpl.computeSomeoneReplies(topology);
/* IPs allowed by neighbor should appear */
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), containsIp(P1.getStartIp()))));
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), containsIp(P1.getEndIp()))));
/* IPs not allowed by neighbor should not appear */
assertThat(
result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), not(containsIp(P2.getStartIp())))));
}
@Test
public void testComputeInterfaceHostSubnetIps() {
Configuration c1 = _cb.build();
Map<String, Configuration> configs = ImmutableMap.of(c1.getHostname(), c1);
Vrf vrf1 = _vb.setOwner(c1).build();
Interface i1 =
_ib.setOwner(c1)
.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.build();
Map<String, Map<String, Map<String, IpSpace>>> interfaceHostSubnetIps =
computeInterfaceHostSubnetIps(configs);
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), (containsIp(Ip.parse("1.0.0.2")))))));
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), not(containsIp(P1.getStartIp()))))));
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), not(containsIp(P1.getEndIp()))))));
}
@Test
public void testComputeInterfaceHostSubnetIpsWithPrefixLength31() {
Configuration c1 = _cb.build();
Map<String, Configuration> configs = ImmutableMap.of(c1.getHostname(), c1);
Vrf vrf1 = _vb.setOwner(c1).build();
Prefix prefix = Prefix.parse("1.0.0.1/31");
Interface i1 =
_ib.setOwner(c1)
.setVrf(vrf1)
.setAddress(new InterfaceAddress(prefix.getStartIp(), prefix.getPrefixLength()))
.build();
Map<String, Map<String, Map<String, IpSpace>>> interfaceHostSubnetIps =
computeInterfaceHostSubnetIps(configs);
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), containsIp(prefix.getStartIp())))));
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), containsIp(prefix.getEndIp())))));
}
@Test
public void testComputeDeliveredToSubnetNoArpFalse() {
String c1 = "c1";
String vrf1 = "vrf1";
String i1 = "i1";
Ip ip = Ip.parse("10.0.0.1");
_arpFalseDestIp =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, EmptyIpSpace.INSTANCE)));
_interfaceHostSubnetIps =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, ip.toIpSpace())));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeDeliveredToSubnet();
assertThat(
result,
hasEntry(equalTo(c1), hasEntry(equalTo(vrf1), hasEntry(equalTo(i1), not(containsIp(ip))))));
}
@Test
public void testComputeDeliveredToSubnetNoInterfaceHostIps() {
String c1 = "c1";
String vrf1 = "vrf1";
String i1 = "i1";
Ip ip = Ip.parse("10.0.0.1");
_arpFalseDestIp =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, ip.toIpSpace())));
_interfaceHostSubnetIps =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, EmptyIpSpace.INSTANCE)));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeDeliveredToSubnet();
assertThat(
result,
hasEntry(equalTo(c1), hasEntry(equalTo(vrf1), hasEntry(equalTo(i1), not(containsIp(ip))))));
}
@Test
public void testComputeDeliveredToSubnetEqual() {
String c1 = "c1";
String vrf1 = "vrf1";
String i1 = "i1";
Ip ip = Ip.parse("10.0.0.1");
_arpFalseDestIp =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, ip.toIpSpace())));
_interfaceHostSubnetIps =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, ip.toIpSpace())));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeDeliveredToSubnet();
assertThat(
result,
hasEntry(equalTo(c1), hasEntry(equalTo(vrf1), hasEntry(equalTo(i1), containsIp(ip)))));
}
enum NextHopIpStatus {
NONE,
INTERNAL,
EXTERNAL
}
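/*
 * Exercises the flow-disposition computation for a single static route out INTERFACE1, varying
 * whether the route's next-hop IP is internal, external, or absent, whether the attached subnet
 * is fully owned, and whether the destination IP is internal and/or inside the subnet, then
 * checks that exactly the expected disposition's IP space covers the destination prefix.
 */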
private void testDispositionComputationTemplate(
NextHopIpStatus nextHopIpStatus,
boolean isSubnetFull,
boolean isDstIpInternal,
boolean isDstIpInSubnet,
FlowDisposition expectedDisposition) {
String nextHopIpString = "1.0.0.1";
Prefix dstPrefix = P3;
Ip nextHopIp = Ip.parse(nextHopIpString);
StaticRoute route =
StaticRoute.builder()
.setNextHopIp(Ip.parse(nextHopIpString))
.setNextHopInterface(INTERFACE1)
.setAdministrativeCost(1)
.setNetwork(dstPrefix)
.build();
AclIpSpace.Builder internalIpsBuilder = AclIpSpace.builder();
if (!isSubnetFull) {
_interfacesWithMissingDevices = ImmutableMap.of(CONFIG1, ImmutableSet.of(INTERFACE1));
} else {
_interfacesWithMissingDevices = ImmutableMap.of(CONFIG1, ImmutableSet.of());
}
if (nextHopIpStatus == NextHopIpStatus.EXTERNAL) {
_routesWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(route))));
_routesWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of())));
_arpFalseDestIp =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
_dstIpsWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
_dstIpsWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
} else if (nextHopIpStatus == NextHopIpStatus.INTERNAL) {
_routesWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of())));
_routesWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(route))));
_arpFalseDestIp =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
internalIpsBuilder.thenPermitting(nextHopIp.toIpSpace());
_dstIpsWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
_dstIpsWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
} else {
_routesWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of())));
_routesWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of())));
_arpFalseDestIp =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
_dstIpsWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
_dstIpsWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
}
if (isDstIpInternal) {
internalIpsBuilder.thenPermitting(dstPrefix.toIpSpace());
}
_internalIps = internalIpsBuilder.build();
if (isDstIpInSubnet) {
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
} else {
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
}
_neighborUnreachableOrExitsNetwork =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
IpSpace deliveredToSubnetIpSpace =
forwardingAnalysisImpl
.computeDeliveredToSubnet()
.getOrDefault(CONFIG1, ImmutableMap.of())
.getOrDefault(VRF1, ImmutableMap.of())
.getOrDefault(INTERFACE1, EmptyIpSpace.INSTANCE);
IpSpace exitsNetworkIpSpace =
forwardingAnalysisImpl.computeExitsNetworkPerInterface(CONFIG1, VRF1, INTERFACE1);
IpSpace insufficientInfoIpSpace =
forwardingAnalysisImpl.computeInsufficientInfoPerInterface(CONFIG1, VRF1, INTERFACE1);
IpSpace neighborUnreachableIpSpace =
forwardingAnalysisImpl.computeNeighborUnreachable().get(CONFIG1).get(VRF1).get(INTERFACE1);
if (expectedDisposition == FlowDisposition.EXITS_NETWORK) {
assertThat(exitsNetworkIpSpace, containsIp(dstPrefix.getStartIp()));
assertThat(exitsNetworkIpSpace, containsIp(dstPrefix.getEndIp()));
} else {
assertThat(exitsNetworkIpSpace, not(containsIp(dstPrefix.getStartIp())));
assertThat(exitsNetworkIpSpace, not(containsIp(dstPrefix.getEndIp())));
}
if (expectedDisposition == FlowDisposition.INSUFFICIENT_INFO) {
assertThat(insufficientInfoIpSpace, containsIp(dstPrefix.getStartIp()));
assertThat(insufficientInfoIpSpace, containsIp(dstPrefix.getEndIp()));
} else {
assertThat(insufficientInfoIpSpace, not(containsIp(dstPrefix.getStartIp())));
assertThat(insufficientInfoIpSpace, not(containsIp(dstPrefix.getEndIp())));
}
if (expectedDisposition == FlowDisposition.DELIVERED_TO_SUBNET) {
assertThat(deliveredToSubnetIpSpace, containsIp(dstPrefix.getStartIp()));
assertThat(deliveredToSubnetIpSpace, containsIp(dstPrefix.getEndIp()));
} else {
assertThat(deliveredToSubnetIpSpace, not(containsIp(dstPrefix.getStartIp())));
assertThat(deliveredToSubnetIpSpace, not(containsIp(dstPrefix.getEndIp())));
}
if (expectedDisposition == FlowDisposition.NEIGHBOR_UNREACHABLE) {
assertThat(neighborUnreachableIpSpace, (containsIp(dstPrefix.getStartIp())));
assertThat(neighborUnreachableIpSpace, (containsIp(dstPrefix.getEndIp())));
} else {
assertThat(neighborUnreachableIpSpace, not(containsIp(dstPrefix.getStartIp())));
assertThat(neighborUnreachableIpSpace, not(containsIp(dstPrefix.getEndIp())));
}
}
@Test
public void testDispositionComputation() {
/*
* Avoid the case where arp dst ip, interface is full, and dst ip is in subnet (would be accepted).
* Avoid cases where dst ip is internal but not in subnet.
*/
// Arp dst ip, interface is full, dst ip is internal -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.NONE, true, true, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// Arp dst ip, interface is full, dst ip is external -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.NONE, true, false, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// Arp dst ip, interface is not full, dst ip is subnet -> delivered to subnet
testDispositionComputationTemplate(
NextHopIpStatus.NONE, false, true, true, FlowDisposition.DELIVERED_TO_SUBNET);
// Arp dst ip, interface is not full, dst ip is internal -> insufficient info
testDispositionComputationTemplate(
NextHopIpStatus.NONE, false, true, false, FlowDisposition.INSUFFICIENT_INFO);
// Arp dst ip, interface is not full, dst ip is external -> exits network
testDispositionComputationTemplate(
NextHopIpStatus.NONE, false, false, false, FlowDisposition.EXITS_NETWORK);
// nhip external, interface is full, dst ip is internal -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, true, true, true, FlowDisposition.NEIGHBOR_UNREACHABLE);
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, true, true, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// nhip external, interface is full, dst ip is external -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, true, false, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// nhip external, interface is not full, dst ip is internal -> insufficient info
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, false, true, true, FlowDisposition.INSUFFICIENT_INFO);
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, false, true, false, FlowDisposition.INSUFFICIENT_INFO);
// nhip external, interface is not full, dst ip is external -> exits network
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, false, false, false, FlowDisposition.EXITS_NETWORK);
// nhip internal, interface is full, dst ip is internal -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, true, true, true, FlowDisposition.NEIGHBOR_UNREACHABLE);
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, true, true, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// nhip internal, interface is full, dst ip is external -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, true, false, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// nhip internal, interface is not full, dst ip is internal -> insufficient info
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, false, true, true, FlowDisposition.INSUFFICIENT_INFO);
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, false, true, false, FlowDisposition.INSUFFICIENT_INFO);
// nhip internal, interface is not full, dst ip is external -> insufficient info
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, false, false, false, FlowDisposition.INSUFFICIENT_INFO);
}
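  // /30 host subnet: both host IPs (.1 and .2) are owned by interfaces, so INTERFACE1's subnet is full.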
@Test
public void testHasMissingDevicesOnInterface_full30() {
Prefix subnet = Prefix.parse("1.0.0.0/30");
Ip ip1 = Ip.parse("1.0.0.1");
Ip ip2 = Ip.parse("1.0.0.2");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(
CONFIG1,
ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1), INTERFACE2, ImmutableSet.of(ip2)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat("INTERFACE1 should be full", !fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
@Test
public void testHasMissingDevicesOnInterface_full31() {
Prefix subnet = Prefix.parse("1.0.0.0/31");
Ip ip1 = Ip.parse("1.0.0.0");
Ip ip2 = Ip.parse("1.0.0.1");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(
CONFIG1,
ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1), INTERFACE2, ImmutableSet.of(ip2)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat("INTERFACE1 should be full", !fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
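  // /32 subnet: the single address is owned, so no device can be missing.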
@Test
public void testHasMissingDevicesOnInterface_full32() {
Prefix subnet = Prefix.parse("1.0.0.0/32");
Ip ip1 = Ip.parse("1.0.0.0");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(CONFIG1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat("INTERFACE1 should be full", !fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
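  // /30 subnet with only one of the two host IPs owned: a device is missing on INTERFACE1.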
@Test
public void testHasMissingDevicesOnInterface_notFull30() {
Prefix subnet = Prefix.parse("1.0.0.0/30");
Ip ip1 = Ip.parse("1.0.0.1");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(CONFIG1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat(
"INTERFACE1 should not be full", fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
@Test
public void testHasMissingDevicesOnInterface_notFull31() {
Prefix subnet = Prefix.parse("1.0.0.0/31");
Ip ip1 = Ip.parse("1.0.0.0");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(CONFIG1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat(
"INTERFACE1 should not be full", fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
// If two nodes are in the same subnet but not connected per the given topology,
// sending packets from one to the other should result in Neighbor Unreachable.
@Test
public void testDispositionWithTopology() {
Prefix prefix = Prefix.parse("1.0.0.0/24");
IpSpace ipSpace = prefix.toIpSpace();
Ip ip2 = Ip.parse("1.0.0.2");
Configuration c1 = _cb.setHostname("c1").build();
Configuration c2 = _cb.setHostname("c2").build();
Vrf v1 = _vb.setName("v1").setOwner(c1).build();
Vrf v2 = _vb.setName("v2").setOwner(c2).build();
_ib.setActive(true);
Interface i1 =
_ib.setAddresses(new InterfaceAddress("1.0.0.1/24"))
.setName("i1")
.setOwner(c1)
.setVrf(v1)
.build();
Interface i2 =
_ib.setAddresses(new InterfaceAddress("1.0.0.2/24"))
.setName("i2")
.setOwner(c2)
.setVrf(v2)
.build();
StaticRoute route1 =
StaticRoute.builder()
.setNetwork(prefix)
.setNextHopInterface(i1.getName())
.setAdministrativeCost(1)
.build();
StaticRoute route2 =
StaticRoute.builder()
.setNextHopInterface(i2.getName())
.setNetwork(prefix)
.setAdministrativeCost(1)
.build();
v1.setStaticRoutes(ImmutableSortedSet.of(route1));
v2.setStaticRoutes(ImmutableSortedSet.of(route2));
MockRib rib1 =
MockRib.builder()
.setRoutes(ImmutableSet.of(route1))
.setRoutableIps(ipSpace)
.setMatchingIps(ImmutableMap.of(prefix, ipSpace))
.build();
MockRib rib2 =
MockRib.builder()
.setRoutes(ImmutableSet.of(route2))
.setRoutableIps(ipSpace)
.setMatchingIps(ImmutableMap.of(prefix, ipSpace))
.build();
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1.getHostname(),
ImmutableSortedMap.of(v1.getName(), rib1),
c2.getHostname(),
ImmutableSortedMap.of(v2.getName(), rib2));
MockFib fib1 =
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
route1,
ImmutableMap.of(
i1.getName(),
ImmutableMap.of(Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(route1)))))
.setRoutesByNextHopInterface(ImmutableMap.of(i1.getName(), ImmutableSet.of(route1)))
.build();
MockFib fib2 =
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
route2,
ImmutableMap.of(
i2.getName(),
ImmutableMap.of(Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(route2)))))
.setRoutesByNextHopInterface(ImmutableMap.of(i2.getName(), ImmutableSet.of(route2)))
.build();
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1.getHostname(), ImmutableMap.of(v1.getName(), fib1),
c2.getHostname(), ImmutableMap.of(v2.getName(), fib2));
ForwardingAnalysis analysis =
new ForwardingAnalysisImpl(
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2),
ribs,
fibs,
new Topology(ImmutableSortedSet.of()));
assertFalse(
analysis
.getDeliveredToSubnet()
.get(c1.getHostname())
.get(v1.getName())
.get(i1.getName())
.containsIp(ip2, c1.getIpSpaces()));
assertTrue(
analysis
.getNeighborUnreachable()
.get(c1.getHostname())
.get(v1.getName())
.get(i1.getName())
.containsIp(ip2, c1.getIpSpaces()));
assertFalse(
analysis
.getInsufficientInfo()
.get(c1.getHostname())
.get(v1.getName())
.get(i1.getName())
.containsIp(ip2, c1.getIpSpaces()));
}
}
| projects/batfish-common-protocol/src/test/java/org/batfish/datamodel/ForwardingAnalysisImplTest.java | package org.batfish.datamodel;
import static org.batfish.datamodel.ForwardingAnalysisImpl.computeInterfaceHostSubnetIps;
import static org.batfish.datamodel.ForwardingAnalysisImpl.computeMatchingIps;
import static org.batfish.datamodel.matchers.AclIpSpaceMatchers.hasLines;
import static org.batfish.datamodel.matchers.AclIpSpaceMatchers.isAclIpSpaceThat;
import static org.batfish.datamodel.matchers.IpSpaceMatchers.containsIp;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import org.batfish.common.topology.TopologyUtil;
import org.junit.Before;
import org.junit.Test;
public class ForwardingAnalysisImplTest {
private static final String CONFIG1 = "config1";
private static final String VRF1 = "vrf1";
private static final String INTERFACE1 = "interface1";
private static final String INTERFACE2 = "interface2";
private static final IpSpace IPSPACE1 = new MockIpSpace(1);
private static final IpSpace IPSPACE2 = new MockIpSpace(2);
private static final Prefix P1 = Prefix.parse("1.0.0.0/8");
private static final Prefix P2 = Prefix.parse("2.0.0.0/16");
private static final Prefix P3 = Prefix.parse("3.0.0.0/24");
private Map<String, Map<String, IpSpace>> _arpReplies;
private Map<Edge, IpSpace> _arpTrueEdge;
private Map<Edge, IpSpace> _arpTrueEdgeDestIp;
private Map<Edge, IpSpace> _arpTrueEdgeNextHopIp;
private Configuration.Builder _cb;
private Interface.Builder _ib;
private Map<String, Map<String, Set<Ip>>> _interfaceOwnedIps = ImmutableMap.of();
private Map<String, Map<String, IpSpace>> _ipsRoutedOutInterfaces;
private Map<String, Map<String, Map<String, IpSpace>>> _neighborUnreachableOrExitsNetwork;
private Map<String, Map<String, Map<String, IpSpace>>> _arpFalseDestIp;
private Map<String, Map<String, Map<String, IpSpace>>> _arpFalseNextHopIp;
private NetworkFactory _nf;
private Map<String, Map<String, IpSpace>> _nullRoutedIps;
private Map<String, Map<String, IpSpace>> _routableIps;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>> _routesWhereDstIpCanBeArpIp;
private Map<Edge, Set<AbstractRoute>> _routesWithDestIpEdge;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>> _routesWithNextHop;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>> _routesWithNextHopIpArpFalse;
private Map<Edge, Set<AbstractRoute>> _routesWithNextHopIpArpTrue;
private Map<String, Map<String, IpSpace>> _someoneReplies;
private Map<String, Map<String, Map<String, IpSpace>>> _interfaceHostSubnetIps =
ImmutableMap.of();
private Map<String, Set<String>> _interfacesWithMissingDevices;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>>
_routesWithUnownedNextHopIpArpFalse;
private Map<String, Map<String, Map<String, Set<AbstractRoute>>>>
_routesWithOwnedNextHopIpArpFalse;
private Map<String, Map<String, Map<String, IpSpace>>> _dstIpsWithUnownedNextHopIpArpFalse;
private Map<String, Map<String, Map<String, IpSpace>>> _dstIpsWithOwnedNextHopIpArpFalse;
private Vrf.Builder _vb;
private IpSpace _internalIps = EmptyIpSpace.INSTANCE;
private ForwardingAnalysisImpl initForwardingAnalysisImpl() {
return new ForwardingAnalysisImpl(
_arpReplies,
_arpTrueEdge,
_arpTrueEdgeDestIp,
_arpTrueEdgeNextHopIp,
_interfaceOwnedIps,
_ipsRoutedOutInterfaces,
_neighborUnreachableOrExitsNetwork,
_arpFalseDestIp,
_arpFalseNextHopIp,
_nullRoutedIps,
_routableIps,
_routesWhereDstIpCanBeArpIp,
_routesWithDestIpEdge,
_routesWithNextHop,
_routesWithNextHopIpArpFalse,
_routesWithNextHopIpArpTrue,
_someoneReplies,
_interfaceHostSubnetIps,
_interfacesWithMissingDevices,
_routesWithUnownedNextHopIpArpFalse,
_routesWithOwnedNextHopIpArpFalse,
_dstIpsWithUnownedNextHopIpArpFalse,
_dstIpsWithOwnedNextHopIpArpFalse,
_internalIps);
}
@Before
public void setup() {
_nf = new NetworkFactory();
_cb = _nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS);
_vb = _nf.vrfBuilder();
_ib = _nf.interfaceBuilder();
}
@Test
public void testComputeArpReplies() {
Configuration c1 = _cb.build();
Configuration c2 = _cb.build();
Map<String, Configuration> configs =
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2);
Vrf vrf1 = _vb.setOwner(c1).build();
Vrf vrf2 = _vb.setOwner(c2).build();
Interface i1 =
_ib.setOwner(c1)
.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.setProxyArp(true)
.build();
Interface i2 =
_ib.setOwner(c2)
.setVrf(vrf2)
.setAddress(new InterfaceAddress(P2.getStartIp(), P2.getPrefixLength()))
.setProxyArp(false)
.build();
IpSpace ipsRoutedOutI1 =
IpWildcardSetIpSpace.builder().including(new IpWildcard(P1), new IpWildcard(P3)).build();
IpSpace ipsRoutedOutI2 = IpWildcardSetIpSpace.builder().including(new IpWildcard(P2)).build();
Map<String, Configuration> configurations =
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1.getHostname(),
ImmutableSortedMap.of(
vrf1.getName(), MockRib.builder().setRoutableIps(UniverseIpSpace.INSTANCE).build()),
c2.getHostname(),
ImmutableSortedMap.of(
vrf2.getName(),
MockRib.builder().setRoutableIps(UniverseIpSpace.INSTANCE).build()));
_ipsRoutedOutInterfaces =
ImmutableMap.of(
c1.getHostname(),
ImmutableMap.of(i1.getName(), ipsRoutedOutI1),
c2.getHostname(),
ImmutableMap.of(i2.getName(), ipsRoutedOutI2));
_interfaceOwnedIps = TopologyUtil.computeInterfaceOwnedIps(configs, false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, IpSpace>> result =
forwardingAnalysisImpl.computeArpReplies(configurations, ribs);
/* Proxy-arp: Match interface IP, reject what's routed through i1, accept everything else*/
assertThat(
result,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(equalTo(i1.getName()), containsIp(P1.getStartIp()))));
assertThat(
result,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(equalTo(i1.getName()), not(containsIp(P1.getEndIp())))));
assertThat(
result,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(equalTo(i1.getName()), not(containsIp(P3.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(equalTo(i1.getName()), containsIp(P2.getStartIp()))));
/* No proxy-arp: just match interface ip*/
assertThat(
result,
hasEntry(
equalTo(c2.getHostname()),
hasEntry(equalTo(i2.getName()), containsIp(P2.getStartIp()))));
assertThat(
result,
hasEntry(
equalTo(c2.getHostname()),
hasEntry(equalTo(i2.getName()), not(containsIp(P2.getEndIp())))));
assertThat(
result,
hasEntry(
equalTo(c2.getHostname()),
hasEntry(equalTo(i2.getName()), not(containsIp(P3.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c2.getHostname()),
hasEntry(equalTo(i2.getName()), not(containsIp(P1.getStartIp())))));
}
@Test
public void testComputeArpRepliesByInterface() {
Configuration config = _cb.build();
_ib.setOwner(config);
_vb.setOwner(config);
Vrf vrf1 = _vb.build();
Vrf vrf2 = _vb.build();
Interface i1 =
_ib.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.setProxyArp(true)
.build();
Interface i2 =
_ib.setVrf(vrf2)
.setAddress(new InterfaceAddress(P2.getStartIp(), P2.getPrefixLength()))
.setProxyArp(false)
.build();
Interface i3 = _ib.setAddress(null).setProxyArp(true).build();
IpSpace ipsRoutedOutI1 =
IpWildcardSetIpSpace.builder().including(new IpWildcard(P1), new IpWildcard(P3)).build();
IpSpace ipsRoutedOutI2 = IpWildcardSetIpSpace.builder().including(new IpWildcard(P2)).build();
IpSpace ipsRoutedOutI3 = EmptyIpSpace.INSTANCE;
Map<String, Interface> interfaces =
ImmutableMap.of(i1.getName(), i1, i2.getName(), i2, i3.getName(), i3);
Map<String, IpSpace> routableIpsByVrf =
ImmutableMap.of(
vrf1.getName(), UniverseIpSpace.INSTANCE, vrf2.getName(), UniverseIpSpace.INSTANCE);
Map<String, IpSpace> ipsRoutedOutInterfaces =
ImmutableMap.of(
i1.getName(),
ipsRoutedOutI1,
i2.getName(),
ipsRoutedOutI2,
i3.getName(),
ipsRoutedOutI3);
Map<String, Configuration> configs = ImmutableMap.of(config.getHostname(), config);
_interfaceOwnedIps = TopologyUtil.computeInterfaceOwnedIps(configs, false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, IpSpace> result =
forwardingAnalysisImpl.computeArpRepliesByInterface(
interfaces, routableIpsByVrf, ipsRoutedOutInterfaces);
/* Proxy-arp: Match interface IP, reject what's routed through i1, accept everything else*/
assertThat(result, hasEntry(equalTo(i1.getName()), containsIp(P1.getStartIp())));
assertThat(result, hasEntry(equalTo(i1.getName()), not(containsIp(P1.getEndIp()))));
assertThat(result, hasEntry(equalTo(i1.getName()), not(containsIp(P3.getStartIp()))));
assertThat(result, hasEntry(equalTo(i1.getName()), containsIp(P2.getStartIp())));
/* No proxy-arp: just match interface ip*/
assertThat(result, hasEntry(equalTo(i2.getName()), containsIp(P2.getStartIp())));
assertThat(result, hasEntry(equalTo(i2.getName()), not(containsIp(P2.getEndIp()))));
assertThat(result, hasEntry(equalTo(i2.getName()), not(containsIp(P3.getStartIp()))));
assertThat(result, hasEntry(equalTo(i2.getName()), not(containsIp(P1.getStartIp()))));
/* No interface IPs: reject everything */
assertThat(result, hasEntry(equalTo(i3.getName()), equalTo(EmptyIpSpace.INSTANCE)));
}
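  // Two interfaces share a VRRP group for virtual address 1.1.1.1; only the interface with the
  // higher priority (i2, priority 110) should reply to ARP for the virtual address.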
@Test
public void testComputeArpReplies_VRRP() {
Configuration c = _cb.build();
Map<String, Configuration> configs = ImmutableMap.of(c.getHostname(), c);
_ib.setOwner(c);
Vrf vrf1 = _vb.build();
Vrf vrf2 = _vb.build();
Interface i1 =
_ib.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.setVrrpGroups(
ImmutableSortedMap.of(
1,
VrrpGroup.builder()
.setName(1)
.setPriority(100)
.setVirtualAddress(new InterfaceAddress("1.1.1.1/32"))
.build()))
.build();
Interface i2 =
_ib.setVrf(vrf2)
.setAddress(new InterfaceAddress(P1.getEndIp(), P1.getPrefixLength()))
.setVrrpGroups(
ImmutableSortedMap.of(
1,
VrrpGroup.builder()
.setName(1)
.setPriority(110)
.setVirtualAddress(new InterfaceAddress("1.1.1.1/32"))
.build()))
.build();
_interfaceOwnedIps = TopologyUtil.computeInterfaceOwnedIps(configs, false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
IpSpace p1IpSpace = new IpWildcard(P1).toIpSpace();
IpSpace i1ArpReplies =
forwardingAnalysisImpl.computeInterfaceArpReplies(i1, UniverseIpSpace.INSTANCE, p1IpSpace);
IpSpace i2ArpReplies =
forwardingAnalysisImpl.computeInterfaceArpReplies(i2, UniverseIpSpace.INSTANCE, p1IpSpace);
assertThat(i1ArpReplies, not(containsIp(Ip.parse("1.1.1.1"))));
assertThat(i2ArpReplies, containsIp(Ip.parse("1.1.1.1")));
}
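  // The ARP-true IP space of an edge should combine the dest-IP and next-hop-IP contributions.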
@Test
public void testComputeArpTrueEdge() {
IpSpace nextHopIpSpace = new MockIpSpace(1);
IpSpace dstIpSpace = new MockIpSpace(2);
Edge e1 = Edge.of("c1", "i1", "c2", "i2");
_arpTrueEdgeDestIp = ImmutableMap.of(e1, dstIpSpace);
_arpTrueEdgeNextHopIp = ImmutableMap.of(e1, nextHopIpSpace);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, IpSpace> result = forwardingAnalysisImpl.computeArpTrueEdge();
assertThat(
result,
hasEntry(
equalTo(e1),
isAclIpSpaceThat(
hasLines(
containsInAnyOrder(
AclIpSpaceLine.permit(nextHopIpSpace),
AclIpSpaceLine.permit(dstIpSpace))))));
}
@Test
public void testComputeArpTrueEdgeDestIp() {
Configuration c1 = _cb.build();
Configuration c2 = _cb.build();
Vrf vrf1 = _vb.setOwner(c1).build();
Vrf vrf2 = _vb.setOwner(c2).build();
Interface i1 = _ib.setOwner(c1).setVrf(vrf1).build();
Ip i2Ip = Ip.create(P1.getStartIp().asLong() + 1);
Interface i2 = _ib.setOwner(c2).setVrf(vrf2).build();
Map<String, Configuration> configurations =
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1.getHostname(),
ImmutableSortedMap.of(
vrf1.getName(),
MockRib.builder()
.setMatchingIps(
ImmutableMap.of(
P1,
AclIpSpace.rejecting(
Prefix.create(P1.getEndIp(), Prefix.MAX_PREFIX_LENGTH)
.toIpSpace())
.thenPermitting(P1.toIpSpace())
.build()))
.build()));
Edge edge = Edge.of(c1.getHostname(), i1.getName(), c2.getHostname(), i2.getName());
_routesWithDestIpEdge =
ImmutableMap.of(edge, ImmutableSet.of(new ConnectedRoute(P1, i1.getName())));
_arpReplies =
ImmutableMap.of(
c2.getHostname(),
ImmutableMap.of(
i2.getName(),
AclIpSpace.permitting(i2Ip.toIpSpace())
.thenPermitting(P1.getEndIp().toIpSpace())
.build()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, IpSpace> result =
forwardingAnalysisImpl.computeArpTrueEdgeDestIp(configurations, computeMatchingIps(ribs));
/* Respond to request for IP on i2. */
assertThat(result, hasEntry(equalTo(edge), containsIp(i2Ip)));
/* Do not make ARP request for IP matched by more specific route not going out i1. */
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P1.getEndIp()))));
/* Do not receive response for IP i2 does not own. */
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P1.getStartIp()))));
}
@Test
public void testComputeArpTrueEdgeNextHopIp() {
Configuration c1 = _cb.build();
Configuration c2 = _cb.build();
Vrf vrf1 = _vb.setOwner(c1).build();
Vrf vrf2 = _vb.setOwner(c2).build();
Interface i1 =
_ib.setOwner(c1)
.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.build();
Ip i2Ip = Ip.create(P1.getStartIp().asLong() + 1);
Interface i2 =
_ib.setOwner(c2)
.setVrf(vrf2)
.setAddress(new InterfaceAddress(i2Ip, P1.getPrefixLength()))
.build();
Edge edge = Edge.of(c1.getHostname(), i1.getName(), c2.getHostname(), i2.getName());
Map<String, Configuration> configurations =
ImmutableMap.of(c1.getHostname(), c1, c2.getHostname(), c2);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1.getHostname(),
ImmutableSortedMap.of(
vrf1.getName(),
MockRib.builder()
.setMatchingIps(
ImmutableMap.of(
P1,
AclIpSpace.rejecting(
Prefix.create(P1.getEndIp(), Prefix.MAX_PREFIX_LENGTH)
.toIpSpace())
.thenPermitting(P1.toIpSpace())
.build()))
.build()));
_routesWithNextHopIpArpTrue =
ImmutableMap.of(
edge,
ImmutableSet.of(
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, IpSpace> result =
forwardingAnalysisImpl.computeArpTrueEdgeNextHopIp(
configurations, computeMatchingIps(ribs));
/*
* Respond for any destination IP in network not matching more specific route not going out i1.
*/
assertThat(result, hasEntry(equalTo(edge), containsIp(P1.getStartIp())));
assertThat(result, hasEntry(equalTo(edge), containsIp(i2Ip)));
/* Do not respond for destination IP matching more specific route not going out i1 */
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P1.getEndIp()))));
/* Do not respond for destination IPs not matching route */
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P2.getStartIp()))));
assertThat(result, hasEntry(equalTo(edge), not(containsIp(P2.getEndIp()))));
}
@Test
public void testComputeInterfaceArpReplies() {
Configuration config = _cb.build();
_ib.setOwner(config);
InterfaceAddress primary = new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength());
InterfaceAddress secondary = new InterfaceAddress(P2.getStartIp(), P2.getPrefixLength());
Interface iNoProxyArp = _ib.setAddresses(primary, secondary).build();
Interface iProxyArp = _ib.setProxyArp(true).build();
IpSpace routableIpsForThisVrf = UniverseIpSpace.INSTANCE;
IpSpace ipsRoutedThroughInterface =
IpWildcardSetIpSpace.builder().including(new IpWildcard(P1), new IpWildcard(P2)).build();
_interfaceOwnedIps =
TopologyUtil.computeInterfaceOwnedIps(ImmutableMap.of(config.getHostname(), config), false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
IpSpace noProxyArpResult =
forwardingAnalysisImpl.computeInterfaceArpReplies(
iNoProxyArp, routableIpsForThisVrf, ipsRoutedThroughInterface);
IpSpace proxyArpResult =
forwardingAnalysisImpl.computeInterfaceArpReplies(
iProxyArp, routableIpsForThisVrf, ipsRoutedThroughInterface);
/* No proxy-ARP */
/* Accept IPs belonging to interface */
assertThat(noProxyArpResult, containsIp(P1.getStartIp()));
assertThat(noProxyArpResult, containsIp(P2.getStartIp()));
/* Reject all other IPs */
assertThat(noProxyArpResult, not(containsIp(P1.getEndIp())));
assertThat(noProxyArpResult, not(containsIp(P2.getEndIp())));
assertThat(noProxyArpResult, not(containsIp(P3.getStartIp())));
/* Proxy-ARP */
/* Accept IPs belonging to interface */
assertThat(proxyArpResult, containsIp(P1.getStartIp()));
assertThat(proxyArpResult, containsIp(P2.getStartIp()));
/* Reject IPs routed through interface */
assertThat(proxyArpResult, not(containsIp(P1.getEndIp())));
assertThat(proxyArpResult, not(containsIp(P2.getEndIp())));
/* Accept all other routable IPs */
assertThat(proxyArpResult, containsIp(P3.getStartIp()));
}
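  // Should contain the primary and secondary interface addresses, but not other IPs in the
  // secondary subnet.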
@Test
public void testComputeIpsAssignedToThisInterface() {
Configuration config = _cb.build();
Map<String, Configuration> configs = ImmutableMap.of(config.getHostname(), config);
_ib.setOwner(config);
InterfaceAddress primary = new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength());
InterfaceAddress secondary = new InterfaceAddress(P2.getStartIp(), P2.getPrefixLength());
Interface i = _ib.setAddresses(primary, secondary).build();
_interfaceOwnedIps = TopologyUtil.computeInterfaceOwnedIps(configs, false);
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
IpSpace result = forwardingAnalysisImpl.computeIpsAssignedToThisInterface(i);
assertThat(result, containsIp(P1.getStartIp()));
assertThat(result, containsIp(P2.getStartIp()));
assertThat(result, not(containsIp(P2.getEndIp())));
}
@Test
public void testComputeIpsRoutedOutInterfaces() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
ConnectedRoute r1 = new ConnectedRoute(P1, i1);
StaticRoute nullRoute =
StaticRoute.builder()
.setNetwork(P2)
.setNextHopInterface(Interface.NULL_INTERFACE_NAME)
.setAdministrativeCost(1)
.build();
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1,
MockRib.builder()
.setMatchingIps(ImmutableMap.of(P1, P1.toIpSpace(), P2, P2.toIpSpace()))
.build()));
_routesWithNextHop =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
ImmutableMap.of(
i1,
ImmutableSet.of(r1),
Interface.NULL_INTERFACE_NAME,
ImmutableSet.of(nullRoute))));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, IpSpace>> result =
forwardingAnalysisImpl.computeIpsRoutedOutInterfaces(computeMatchingIps(ribs));
/* Should contain IPs matching the route */
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), containsIp(P1.getStartIp()))));
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), containsIp(P1.getEndIp()))));
/* Should not contain IP not matching the route */
assertThat(
result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), not(containsIp(P2.getStartIp())))));
    /* Null interface should be excluded because we would not be able to tie back to a single VRF. */
assertThat(result, hasEntry(equalTo(c1), not(hasKey(equalTo(Interface.NULL_INTERFACE_NAME)))));
}
@Test
public void testComputeNeighborUnreachableOrExitsNetwork() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
_arpFalseDestIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, P1.getStartIp().toIpSpace())));
_arpFalseNextHopIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, P1.getEndIp().toIpSpace())));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeArpFalse();
/* Should contain both IPs. */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c1), hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getEndIp())))));
/* Should not contain unrelated IPs. */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P2.getEndIp()))))));
}
@Test
public void testComputeArpFalseDestIp() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute ifaceRoute = new ConnectedRoute(P1, i1);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1, MockRib.builder().setMatchingIps(ImmutableMap.of(P1, P1.toIpSpace())).build()));
_routesWhereDstIpCanBeArpIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(ifaceRoute))));
_someoneReplies = ImmutableMap.of(c1, ImmutableMap.of(i1, P1.getEndIp().toIpSpace()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeArpFalseDestIp(computeMatchingIps(ribs));
/* Should contain IP in the route's prefix that sees no reply */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getStartIp())))));
/* Should not contain IP in the route's prefix that sees reply */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P1.getEndIp()))))));
/* Should not contain other IPs */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P2.getEndIp()))))));
}
@Test
public void testComputeArpFalseDestIpNoNeighbors() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute ifaceRoute = new ConnectedRoute(P1, i1);
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1, MockRib.builder().setMatchingIps(ImmutableMap.of(P1, P1.toIpSpace())).build()));
_routesWhereDstIpCanBeArpIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(ifaceRoute))));
_someoneReplies = ImmutableMap.of();
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeArpFalseDestIp(computeMatchingIps(ribs));
/*
* Since _someoneReplies is empty, all IPs for which longest-prefix-match route has no
* next-hop-ip should be in the result space.
*/
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c1), hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getEndIp())))));
/* Should not contain other IPs */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P2.getEndIp()))))));
}
@Test
public void testComputeNeighborUnreachableArpNextHopIp() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute r1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
_routesWithNextHop =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))));
_routesWithNextHopIpArpFalse =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))));
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1, MockRib.builder().setMatchingIps(ImmutableMap.of(P1, P1.toIpSpace())).build()));
/*
_interfaceHostSubnetIps =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, EmptyIpSpace.INSTANCE)));
*/
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeArpFalseNextHopIp(computeMatchingIps(ribs));
/* IPs matching some route on interface with no response should appear */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getStartIp())))));
assertThat(
result,
hasEntry(
equalTo(c1), hasEntry(equalTo(v1), hasEntry(equalTo(i1), containsIp(P1.getEndIp())))));
/* Other IPs should not appear */
assertThat(
result,
hasEntry(
equalTo(c1),
hasEntry(equalTo(v1), hasEntry(equalTo(i1), not(containsIp(P2.getStartIp()))))));
}
@Test
public void testComputeNullRoutedIps() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
GenericRib<AbstractRoute> rib1 =
MockRib.builder()
.setMatchingIps(
ImmutableMap.of(
P1,
AclIpSpace.permitting(P1.toIpSpace()).build(),
P2,
AclIpSpace.permitting(P2.toIpSpace()).build()))
.build();
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(c1, ImmutableSortedMap.of(v1, rib1));
AbstractRoute nullRoute =
StaticRoute.builder()
.setNextHopInterface(Interface.NULL_INTERFACE_NAME)
.setNetwork(P1)
.setAdministrativeCost(1)
.build();
AbstractRoute otherRoute = new ConnectedRoute(P2, i1);
Map<String, Map<String, Fib>> fibs =
ImmutableSortedMap.of(
c1,
ImmutableSortedMap.of(
v1,
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
nullRoute,
ImmutableMap.of(
Interface.NULL_INTERFACE_NAME,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(nullRoute))),
otherRoute,
ImmutableMap.of(
i1,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(otherRoute)))))
.build()));
Map<String, Map<String, IpSpace>> result =
ForwardingAnalysisImpl.computeNullRoutedIps(computeMatchingIps(ribs), fibs);
/* IPs for the null route should appear */
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(v1), containsIp(P1.getStartIp()))));
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(v1), containsIp(P1.getEndIp()))));
/* IPs for the non-null route should not appear */
assertThat(
result, hasEntry(equalTo(c1), hasEntry(equalTo(v1), not(containsIp(P2.getStartIp())))));
assertThat(
result, hasEntry(equalTo(c1), hasEntry(equalTo(v1), not(containsIp(P2.getEndIp())))));
}
/**
   * The neighbor unreachable or exits network predicate map should not include an entry for the
   * null interface.
*/
@Test
public void testComputeNeighborUnreachbleOrExitsNetwork_nullInterface() {
NetworkFactory nf = new NetworkFactory();
Configuration c =
nf.configurationBuilder().setConfigurationFormat(ConfigurationFormat.CISCO_IOS).build();
Vrf v = nf.vrfBuilder().setOwner(c).build();
StaticRoute nullRoute =
StaticRoute.builder()
.setNetwork(Prefix.parse("1.0.0.0/8"))
.setNextHopInterface(Interface.NULL_INTERFACE_NAME)
.setAdministrativeCost(1)
.build();
IpSpace ipSpace = IpWildcardSetIpSpace.builder().including(new IpWildcard("1.0.0.0/8")).build();
v.setStaticRoutes(ImmutableSortedSet.of(nullRoute));
SortedMap<String, Configuration> configs = ImmutableSortedMap.of(c.getHostname(), c);
MockRib mockRib =
MockRib.builder()
.setRoutes(ImmutableSet.of(nullRoute))
.setRoutableIps(ipSpace)
.setMatchingIps(ImmutableMap.of(Prefix.parse("1.0.0.0/8"), ipSpace))
.build();
MockFib mockFib =
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
nullRoute,
ImmutableMap.of(
Interface.NULL_INTERFACE_NAME,
ImmutableMap.of(Ip.AUTO, ImmutableSet.of(nullRoute)))))
.setRoutesByNextHopInterface(
ImmutableMap.of(Interface.NULL_INTERFACE_NAME, ImmutableSet.of(nullRoute)))
.build();
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(c.getHostname(), ImmutableSortedMap.of(v.getName(), mockRib));
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(c.getHostname(), ImmutableMap.of(v.getName(), mockFib));
ForwardingAnalysisImpl forwardingAnalysisImpl =
new ForwardingAnalysisImpl(configs, ribs, fibs, new Topology(ImmutableSortedSet.of()));
Map<String, Map<String, Map<String, IpSpace>>> neighborUnreachableOrExitsNetwork =
forwardingAnalysisImpl.getNeighborUnreachableOrExitsNetwork();
assertThat(
neighborUnreachableOrExitsNetwork,
hasEntry(
equalTo(c.getHostname()),
hasEntry(equalTo(v.getName()), not(hasKey(Interface.NULL_INTERFACE_NAME)))));
}
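  // Routable IPs should be taken directly from each VRF's RIB.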
@Test
public void testComputeRoutableIps() {
String c1 = "c1";
String v1 = "v1";
SortedMap<String, SortedMap<String, GenericRib<AbstractRoute>>> ribs =
ImmutableSortedMap.of(
c1, ImmutableSortedMap.of(v1, MockRib.builder().setRoutableIps(IPSPACE1).build()));
Map<String, Map<String, IpSpace>> result = ForwardingAnalysisImpl.computeRoutableIps(ribs);
assertThat(result, equalTo(ImmutableMap.of(c1, ImmutableMap.of(v1, IPSPACE1))));
}
@Test
public void testComputeRouteMatchConditions() {
Set<AbstractRoute> routes =
ImmutableSet.of(new ConnectedRoute(P1, INTERFACE1), new ConnectedRoute(P2, INTERFACE2));
MockRib rib =
MockRib.builder().setMatchingIps(ImmutableMap.of(P1, IPSPACE1, P2, IPSPACE2)).build();
/* Resulting IP space should permit matching IPs */
assertThat(
ForwardingAnalysisImpl.computeRouteMatchConditions(routes, rib.getMatchingIps()),
isAclIpSpaceThat(
hasLines(
containsInAnyOrder(
AclIpSpaceLine.permit(IPSPACE1), AclIpSpaceLine.permit(IPSPACE2)))));
}
@Test
public void testComputeRoutesWhereDstIpCanBeArpIp() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute r1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
AbstractRoute ifaceRoute = new ConnectedRoute(P2, i1);
_routesWithNextHop =
ImmutableMap.of(
c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1, ifaceRoute))));
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
r1,
ImmutableMap.of(
i1,
ImmutableMap.of(r1.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
ifaceRoute,
ImmutableMap.of(
i1,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(ifaceRoute)))))
.build()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, Set<AbstractRoute>>>> result =
forwardingAnalysisImpl.computeRoutesWhereDstIpCanBeArpIp(fibs);
/* Only the interface route should show up */
assertThat(
result,
equalTo(
ImmutableMap.of(
c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(ifaceRoute))))));
}
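  // Routes whose dst IP can be the ARP IP should be attached to the topology edge leaving their
  // interface.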
@Test
public void testComputeRoutesWithDestIpEdge() {
String c1 = "c1";
String c2 = "c2";
String v1 = "v1";
String i1 = "i1";
String i2 = "i2";
AbstractRoute r1 = new ConnectedRoute(P1, i1);
_routesWhereDstIpCanBeArpIp =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))));
Edge e1 = Edge.of(c1, i1, c2, i2);
_arpReplies = ImmutableMap.of(c2, ImmutableMap.of(i2, P2.getStartIp().toIpSpace()));
Topology topology = new Topology(ImmutableSortedSet.of(e1));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, Set<AbstractRoute>> result =
forwardingAnalysisImpl.computeRoutesWithDestIpEdge(topology);
assertThat(result, equalTo(ImmutableMap.of(e1, ImmutableSet.of(r1))));
}
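  // Routes from the FIB should be grouped by hostname, VRF, and outgoing interface.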
@Test
public void testComputeRoutesWithNextHop() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
ConnectedRoute r1 = new ConnectedRoute(P1, i1);
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
MockFib.builder()
.setRoutesByNextHopInterface(ImmutableMap.of(i1, ImmutableSet.of(r1)))
.build()));
Configuration config = _cb.setHostname(c1).build();
Vrf vrf = _vb.setName(v1).setOwner(config).build();
_ib.setName(i1).setVrf(vrf).setOwner(config).build();
Map<String, Map<String, Map<String, Set<AbstractRoute>>>> result =
ForwardingAnalysisImpl.computeRoutesWithNextHop(ImmutableMap.of(c1, config), fibs);
assertThat(
result,
equalTo(
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))))));
}
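  // The route's next-hop IP gets no ARP reply (only P2's end IP is answered), so the route should
  // be reported as ARP-false.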
@Test
public void testComputeRoutesWithNextHopIpArpFalse() {
String c1 = "c1";
String v1 = "v1";
String i1 = "i1";
AbstractRoute r1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
_routesWithNextHop =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))));
AbstractRoute ifaceRoute = new ConnectedRoute(P2, i1);
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
r1,
ImmutableMap.of(
i1,
ImmutableMap.of(r1.getNextHopIp(), ImmutableSet.of(ifaceRoute)))))
.build()));
_someoneReplies = ImmutableMap.of(c1, ImmutableMap.of(i1, P2.getEndIp().toIpSpace()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, Set<AbstractRoute>>>> result =
forwardingAnalysisImpl.computeRoutesWithNextHopIpArpFalse(fibs);
assertThat(
result,
equalTo(
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1))))));
}
@Test
public void testComputeRoutesWithNextHopIpArpFalseForInterface() {
String hostname = "c1";
String outInterface = "i1";
AbstractRoute nextHopIpRoute1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
AbstractRoute nextHopIpRoute2 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getEndIp())
.setAdministrativeCost(1)
.build();
AbstractRoute ifaceRoute = new ConnectedRoute(P2, outInterface);
Set<AbstractRoute> candidateRoutes =
ImmutableSet.of(nextHopIpRoute1, nextHopIpRoute2, ifaceRoute);
_someoneReplies =
ImmutableMap.of(hostname, ImmutableMap.of(outInterface, P2.getStartIp().toIpSpace()));
Fib fib =
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
nextHopIpRoute1,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
nextHopIpRoute1.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
nextHopIpRoute2,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
nextHopIpRoute2.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
ifaceRoute,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(ifaceRoute)))))
.build();
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Set<AbstractRoute> result =
forwardingAnalysisImpl.computeRoutesWithNextHopIpArpFalseForInterface(
fib, hostname, outInterface, candidateRoutes);
/*
     * Should only contain nextHopIpRoute2 since it is the only route with a next-hop-ip for which
     * there is no ARP reply.
*/
assertThat(result, equalTo(ImmutableSet.of(nextHopIpRoute2)));
}
@Test
public void testComputeRoutesWithNextHopIpArpFalseForInterfaceNoNeighbors() {
String hostname = "c1";
String outInterface = "i1";
AbstractRoute nextHopIpRoute1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
AbstractRoute nextHopIpRoute2 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getEndIp())
.setAdministrativeCost(1)
.build();
AbstractRoute ifaceRoute = new ConnectedRoute(P2, outInterface);
Set<AbstractRoute> candidateRoutes =
ImmutableSet.of(nextHopIpRoute1, nextHopIpRoute2, ifaceRoute);
_someoneReplies = ImmutableMap.of();
Fib fib =
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
nextHopIpRoute1,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
nextHopIpRoute1.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
nextHopIpRoute2,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
nextHopIpRoute2.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
ifaceRoute,
ImmutableMap.of(
outInterface,
ImmutableMap.of(
Route.UNSET_ROUTE_NEXT_HOP_IP, ImmutableSet.of(ifaceRoute)))))
.build();
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Set<AbstractRoute> result =
forwardingAnalysisImpl.computeRoutesWithNextHopIpArpFalseForInterface(
fib, hostname, outInterface, candidateRoutes);
/*
* Should contain both nextHopIpRoute1 and nextHopIpRoute2, since:
* 1) They are the only routes with a next hop IP.
* 2) Their next hop IPs do not receive ARP reply since _someoneReplies is empty.
*/
assertThat(result, equalTo(ImmutableSet.of(nextHopIpRoute1, nextHopIpRoute2)));
}
@Test
public void testComputeRoutesWithNextHopIpArpTrue() {
String c1 = "c1";
String i1 = "i1";
String c2 = "c2";
String i2 = "i2";
Edge e1 = Edge.of(c1, i1, c2, i2);
_arpReplies = ImmutableMap.of(c2, ImmutableMap.of(i2, P2.getStartIp().toIpSpace()));
Topology topology = new Topology(ImmutableSortedSet.of(e1));
String v1 = "v1";
AbstractRoute r1 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getStartIp())
.setAdministrativeCost(1)
.build();
AbstractRoute r2 =
StaticRoute.builder()
.setNetwork(P1)
.setNextHopIp(P2.getEndIp())
.setAdministrativeCost(1)
.build();
_routesWithNextHop =
ImmutableMap.of(c1, ImmutableMap.of(v1, ImmutableMap.of(i1, ImmutableSet.of(r1, r2))));
AbstractRoute ifaceRoute = new ConnectedRoute(P2, i1);
Map<String, Map<String, Fib>> fibs =
ImmutableMap.of(
c1,
ImmutableMap.of(
v1,
MockFib.builder()
.setNextHopInterfaces(
ImmutableMap.of(
r1,
ImmutableMap.of(
i1,
ImmutableMap.of(r1.getNextHopIp(), ImmutableSet.of(ifaceRoute))),
r2,
ImmutableMap.of(
i1,
ImmutableMap.of(r2.getNextHopIp(), ImmutableSet.of(ifaceRoute)))))
.build()));
_someoneReplies = ImmutableMap.of(c1, ImmutableMap.of(i1, P2.getEndIp().toIpSpace()));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<Edge, Set<AbstractRoute>> result =
forwardingAnalysisImpl.computeRoutesWithNextHopIpArpTrue(fibs, topology);
/* Only the route with the next hop ip that gets a reply should be present. */
assertThat(result, equalTo(ImmutableMap.of(e1, ImmutableSet.of(r1))));
}
@Test
public void testComputeSomeoneReplies() {
String c1 = "c1";
String i1 = "i1";
String c2 = "c2";
String i2 = "i2";
Edge e1 = Edge.of(c1, i1, c2, i2);
_arpReplies = ImmutableMap.of(c2, ImmutableMap.of(i2, P1.toIpSpace()));
Topology topology = new Topology(ImmutableSortedSet.of(e1));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, IpSpace>> result =
forwardingAnalysisImpl.computeSomeoneReplies(topology);
/* IPs allowed by neighbor should appear */
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), containsIp(P1.getStartIp()))));
assertThat(result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), containsIp(P1.getEndIp()))));
/* IPs not allowed by neighbor should not appear */
assertThat(
result, hasEntry(equalTo(c1), hasEntry(equalTo(i1), not(containsIp(P2.getStartIp())))));
}
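  // Host subnet IPs should include host addresses of the interface's prefix but exclude the
  // network and broadcast addresses.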
@Test
public void testComputeInterfaceHostSubnetIps() {
Configuration c1 = _cb.build();
Map<String, Configuration> configs = ImmutableMap.of(c1.getHostname(), c1);
Vrf vrf1 = _vb.setOwner(c1).build();
Interface i1 =
_ib.setOwner(c1)
.setVrf(vrf1)
.setAddress(new InterfaceAddress(P1.getStartIp(), P1.getPrefixLength()))
.build();
Map<String, Map<String, Map<String, IpSpace>>> interfaceHostSubnetIps =
computeInterfaceHostSubnetIps(configs);
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), (containsIp(Ip.parse("1.0.0.2")))))));
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), not(containsIp(P1.getStartIp()))))));
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), not(containsIp(P1.getEndIp()))))));
}
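  // For a /31 prefix, both addresses are usable host IPs and should be included.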
@Test
public void testComputeInterfaceHostSubnetIpsWithPrefixLength31() {
Configuration c1 = _cb.build();
Map<String, Configuration> configs = ImmutableMap.of(c1.getHostname(), c1);
Vrf vrf1 = _vb.setOwner(c1).build();
Prefix prefix = Prefix.parse("1.0.0.1/31");
Interface i1 =
_ib.setOwner(c1)
.setVrf(vrf1)
.setAddress(new InterfaceAddress(prefix.getStartIp(), prefix.getPrefixLength()))
.build();
Map<String, Map<String, Map<String, IpSpace>>> interfaceHostSubnetIps =
computeInterfaceHostSubnetIps(configs);
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), containsIp(prefix.getStartIp())))));
assertThat(
interfaceHostSubnetIps,
hasEntry(
equalTo(c1.getHostname()),
hasEntry(
equalTo(vrf1.getName()),
hasEntry(equalTo(i1.getName()), containsIp(prefix.getEndIp())))));
}
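  // An IP that is not an ARP-false dest IP should not be reported as delivered to the subnet.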
@Test
public void testComputeDeliveredToSubnetNoArpFalse() {
String c1 = "c1";
String vrf1 = "vrf1";
String i1 = "i1";
Ip ip = Ip.parse("10.0.0.1");
_arpFalseDestIp =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, EmptyIpSpace.INSTANCE)));
_interfaceHostSubnetIps =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, ip.toIpSpace())));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeDeliveredToSubnet();
assertThat(
result,
hasEntry(equalTo(c1), hasEntry(equalTo(vrf1), hasEntry(equalTo(i1), not(containsIp(ip))))));
}
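  // An IP outside the interface's host subnet IP space should not be reported as delivered to the
  // subnet.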
@Test
public void testComputeDeliveredToSubnetNoInterfaceHostIps() {
String c1 = "c1";
String vrf1 = "vrf1";
String i1 = "i1";
Ip ip = Ip.parse("10.0.0.1");
_arpFalseDestIp =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, ip.toIpSpace())));
_interfaceHostSubnetIps =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, EmptyIpSpace.INSTANCE)));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeDeliveredToSubnet();
assertThat(
result,
hasEntry(equalTo(c1), hasEntry(equalTo(vrf1), hasEntry(equalTo(i1), not(containsIp(ip))))));
}
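  // An IP that is both an ARP-false dest IP and a host subnet IP should be delivered to the subnet.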
@Test
public void testComputeDeliveredToSubnetEqual() {
String c1 = "c1";
String vrf1 = "vrf1";
String i1 = "i1";
Ip ip = Ip.parse("10.0.0.1");
_arpFalseDestIp =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, ip.toIpSpace())));
_interfaceHostSubnetIps =
ImmutableMap.of(c1, ImmutableMap.of(vrf1, ImmutableMap.of(i1, ip.toIpSpace())));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
Map<String, Map<String, Map<String, IpSpace>>> result =
forwardingAnalysisImpl.computeDeliveredToSubnet();
assertThat(
result,
hasEntry(equalTo(c1), hasEntry(equalTo(vrf1), hasEntry(equalTo(i1), containsIp(ip)))));
}
enum NextHopIpStatus {
NONE,
INTERNAL,
EXTERNAL
}
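  // Builds analysis state for a single interface (CONFIG1/VRF1/INTERFACE1) from the given flags and
  // asserts that the destination prefix falls only into the IP space of the expected disposition.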
private void testDispositionComputationTemplate(
NextHopIpStatus nextHopIpStatus,
boolean isSubnetFull,
boolean isDstIpInternal,
boolean isDstIpInSubnet,
FlowDisposition expectedDisposition) {
String nextHopIpString = "1.0.0.1";
Prefix dstPrefix = P3;
Ip nextHopIp = Ip.parse(nextHopIpString);
StaticRoute route =
StaticRoute.builder()
.setNextHopIp(Ip.parse(nextHopIpString))
.setNextHopInterface(INTERFACE1)
.setAdministrativeCost(1)
.setNetwork(dstPrefix)
.build();
AclIpSpace.Builder internalIpsBuilder = AclIpSpace.builder();
if (!isSubnetFull) {
_interfacesWithMissingDevices = ImmutableMap.of(CONFIG1, ImmutableSet.of(INTERFACE1));
} else {
_interfacesWithMissingDevices = ImmutableMap.of(CONFIG1, ImmutableSet.of());
}
if (nextHopIpStatus == NextHopIpStatus.EXTERNAL) {
_routesWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(route))));
_routesWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of())));
_arpFalseDestIp =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
_dstIpsWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
_dstIpsWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
} else if (nextHopIpStatus == NextHopIpStatus.INTERNAL) {
_routesWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of())));
_routesWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(route))));
_arpFalseDestIp =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
internalIpsBuilder.thenPermitting(nextHopIp.toIpSpace());
_dstIpsWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
_dstIpsWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
} else {
_routesWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of())));
_routesWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, ImmutableSet.of())));
_arpFalseDestIp =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
_dstIpsWithOwnedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
_dstIpsWithUnownedNextHopIpArpFalse =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
}
if (isDstIpInternal) {
internalIpsBuilder.thenPermitting(dstPrefix.toIpSpace());
}
_internalIps = internalIpsBuilder.build();
if (isDstIpInSubnet) {
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
} else {
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, EmptyIpSpace.INSTANCE)));
}
_neighborUnreachableOrExitsNetwork =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, dstPrefix.toIpSpace())));
ForwardingAnalysisImpl forwardingAnalysisImpl = initForwardingAnalysisImpl();
IpSpace deliveredToSubnetIpSpace =
forwardingAnalysisImpl
.computeDeliveredToSubnet()
.getOrDefault(CONFIG1, ImmutableMap.of())
.getOrDefault(VRF1, ImmutableMap.of())
.getOrDefault(INTERFACE1, EmptyIpSpace.INSTANCE);
IpSpace exitsNetworkIpSpace =
forwardingAnalysisImpl.computeExitsNetworkPerInterface(CONFIG1, VRF1, INTERFACE1);
IpSpace insufficientInfoIpSpace =
forwardingAnalysisImpl.computeInsufficientInfoPerInterface(CONFIG1, VRF1, INTERFACE1);
IpSpace neighborUnreachableIpSpace =
forwardingAnalysisImpl.computeNeighborUnreachable().get(CONFIG1).get(VRF1).get(INTERFACE1);
if (expectedDisposition == FlowDisposition.EXITS_NETWORK) {
assertThat(exitsNetworkIpSpace, containsIp(dstPrefix.getStartIp()));
assertThat(exitsNetworkIpSpace, containsIp(dstPrefix.getEndIp()));
} else {
assertThat(exitsNetworkIpSpace, not(containsIp(dstPrefix.getStartIp())));
assertThat(exitsNetworkIpSpace, not(containsIp(dstPrefix.getEndIp())));
}
if (expectedDisposition == FlowDisposition.INSUFFICIENT_INFO) {
assertThat(insufficientInfoIpSpace, containsIp(dstPrefix.getStartIp()));
assertThat(insufficientInfoIpSpace, containsIp(dstPrefix.getEndIp()));
} else {
assertThat(insufficientInfoIpSpace, not(containsIp(dstPrefix.getStartIp())));
assertThat(insufficientInfoIpSpace, not(containsIp(dstPrefix.getEndIp())));
}
if (expectedDisposition == FlowDisposition.DELIVERED_TO_SUBNET) {
assertThat(deliveredToSubnetIpSpace, containsIp(dstPrefix.getStartIp()));
assertThat(deliveredToSubnetIpSpace, containsIp(dstPrefix.getEndIp()));
} else {
assertThat(deliveredToSubnetIpSpace, not(containsIp(dstPrefix.getStartIp())));
assertThat(deliveredToSubnetIpSpace, not(containsIp(dstPrefix.getEndIp())));
}
if (expectedDisposition == FlowDisposition.NEIGHBOR_UNREACHABLE) {
assertThat(neighborUnreachableIpSpace, (containsIp(dstPrefix.getStartIp())));
assertThat(neighborUnreachableIpSpace, (containsIp(dstPrefix.getEndIp())));
} else {
assertThat(neighborUnreachableIpSpace, not(containsIp(dstPrefix.getStartIp())));
assertThat(neighborUnreachableIpSpace, not(containsIp(dstPrefix.getEndIp())));
}
}
@Test
public void testDispositionComputation() {
/*
     * Avoid the case where we ARP for the dst IP, the interface is full, and the dst IP is in the
     * subnet (such a flow would be accepted).
     * Avoid cases where the dst IP is internal but not in the subnet.
*/
// Arp dst ip, interface is full, dst ip is internal -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.NONE, true, true, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// Arp dst ip, interface is full, dst ip is external -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.NONE, true, false, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// Arp dst ip, interface is not full, dst ip is subnet -> delivered to subnet
testDispositionComputationTemplate(
NextHopIpStatus.NONE, false, true, true, FlowDisposition.DELIVERED_TO_SUBNET);
// Arp dst ip, interface is not full, dst ip is internal -> insufficient info
testDispositionComputationTemplate(
NextHopIpStatus.NONE, false, true, false, FlowDisposition.INSUFFICIENT_INFO);
// Arp dst ip, interface is not full, dst ip is external -> exits network
testDispositionComputationTemplate(
NextHopIpStatus.NONE, false, false, false, FlowDisposition.EXITS_NETWORK);
// nhip external, interface is full, dst ip is internal -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, true, true, true, FlowDisposition.NEIGHBOR_UNREACHABLE);
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, true, true, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// nhip external, interface is full, dst ip is external -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, true, false, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// nhip external, interface is not full, dst ip is internal -> insufficient info
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, false, true, true, FlowDisposition.INSUFFICIENT_INFO);
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, false, true, false, FlowDisposition.INSUFFICIENT_INFO);
// nhip external, interface is not full, dst ip is external -> exits network
testDispositionComputationTemplate(
NextHopIpStatus.EXTERNAL, false, false, false, FlowDisposition.EXITS_NETWORK);
// nhip internal, interface is full, dst ip is internal -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, true, true, true, FlowDisposition.NEIGHBOR_UNREACHABLE);
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, true, true, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// nhip internal, interface is full, dst ip is external -> neighbor unreachable
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, true, false, false, FlowDisposition.NEIGHBOR_UNREACHABLE);
// nhip internal, interface is not full, dst ip is internal -> insufficient info
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, false, true, true, FlowDisposition.INSUFFICIENT_INFO);
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, false, true, false, FlowDisposition.INSUFFICIENT_INFO);
// nhip internal, interface is not full, dst ip is external -> insufficient info
testDispositionComputationTemplate(
NextHopIpStatus.INTERNAL, false, false, false, FlowDisposition.INSUFFICIENT_INFO);
}
@Test
public void testHasMissingDevicesOnInterface_full30() {
Prefix subnet = Prefix.parse("1.0.0.0/30");
Ip ip1 = Ip.parse("1.0.0.1");
Ip ip2 = Ip.parse("1.0.0.2");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(
CONFIG1,
ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1), INTERFACE2, ImmutableSet.of(ip2)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat("INTERFACE1 should be full", !fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
@Test
public void testHasMissingDevicesOnInterface_full31() {
Prefix subnet = Prefix.parse("1.0.0.0/31");
Ip ip1 = Ip.parse("1.0.0.0");
Ip ip2 = Ip.parse("1.0.0.1");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(
CONFIG1,
ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1), INTERFACE2, ImmutableSet.of(ip2)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat("INTERFACE1 should be full", !fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
@Test
public void testHasMissingDevicesOnInterface_full32() {
Prefix subnet = Prefix.parse("1.0.0.0/32");
Ip ip1 = Ip.parse("1.0.0.0");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(CONFIG1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat("INTERFACE1 should be full", !fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
@Test
public void testHasMissingDevicesOnInterface_notFull30() {
Prefix subnet = Prefix.parse("1.0.0.0/30");
Ip ip1 = Ip.parse("1.0.0.1");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(CONFIG1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat(
"INTERFACE1 should not be full", fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
@Test
public void testHasMissingDevicesOnInterface_notFull31() {
Prefix subnet = Prefix.parse("1.0.0.0/31");
Ip ip1 = Ip.parse("1.0.0.0");
_interfaceHostSubnetIps =
ImmutableMap.of(
CONFIG1, ImmutableMap.of(VRF1, ImmutableMap.of(INTERFACE1, subnet.toHostIpSpace())));
_interfaceOwnedIps =
ImmutableMap.of(CONFIG1, ImmutableMap.of(INTERFACE1, ImmutableSet.of(ip1)));
ForwardingAnalysisImpl fa = initForwardingAnalysisImpl();
assertThat(
"INTERFACE1 should not be full", fa.hasMissingDevicesOnInterface(CONFIG1, INTERFACE1));
}
}
| New disposition test when topology provided (#3046)
* New disposition test when topology provided
* small fix
* remove unused var
| projects/batfish-common-protocol/src/test/java/org/batfish/datamodel/ForwardingAnalysisImplTest.java | New disposition test when topology provided (#3046) |
|
Java | apache-2.0 | 515b7d932a345d3000b3d07719d9fd72f5dab1c6 | 0 | vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.controller.maintenance;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.NodeType;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.zone.ZoneId;
import com.yahoo.text.Text;
import com.yahoo.vespa.hosted.controller.Controller;
import com.yahoo.vespa.hosted.controller.api.integration.configserver.Node;
import com.yahoo.vespa.hosted.controller.api.integration.configserver.NodeFilter;
import com.yahoo.vespa.hosted.controller.api.integration.configserver.NodeRepository;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.ChangeRequest;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.ChangeRequest.Impact;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.ChangeRequestClient;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.HostAction;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.HostAction.State;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.VcmrReport;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.VespaChangeRequest;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.VespaChangeRequest.Status;
import com.yahoo.vespa.hosted.controller.persistence.CuratorDb;
import com.yahoo.yolean.Exceptions;
import java.time.Duration;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
*
* Maintains status and execution of Vespa CMRs.
*
* Currently this retires all affected tenant hosts if zone capacity allows it.
*
* @author olaa
*/
public class VcmrMaintainer extends ControllerMaintainer {
private static final Logger LOG = Logger.getLogger(VcmrMaintainer.class.getName());
private static final int DAYS_TO_RETIRE = 2;
private static final Duration ALLOWED_POSTPONEMENT_TIME = Duration.ofDays(7);
private final CuratorDb curator;
private final NodeRepository nodeRepository;
private final ChangeRequestClient changeRequestClient;
private final SystemName system;
public VcmrMaintainer(Controller controller, Duration interval) {
super(controller, interval, null, SystemName.allOf(Predicate.not(SystemName::isPublic)));
this.curator = controller.curator();
this.nodeRepository = controller.serviceRegistry().configServer().nodeRepository();
this.changeRequestClient = controller.serviceRegistry().changeRequestClient();
this.system = controller.system();
}
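    // For each open VCMR: find the impacted nodes per zone, compute the next host actions and the
    // overall status, persist the updated plan under the change-request lock, and auto-approve when
    // only a single zone is impacted.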
@Override
protected double maintain() {
var changeRequests = curator.readChangeRequests()
.stream()
.filter(shouldUpdate())
.collect(Collectors.toList());
var nodesByZone = nodesByZone();
changeRequests.forEach(changeRequest -> {
var nodes = impactedNodes(nodesByZone, changeRequest);
var nextActions = getNextActions(nodes, changeRequest);
var status = getStatus(nextActions, changeRequest);
try (var lock = curator.lockChangeRequests()) {
// Read the vcmr again, in case the source status has been updated
curator.readChangeRequest(changeRequest.getId())
.ifPresent(vcmr -> {
var updatedVcmr = vcmr.withActionPlan(nextActions)
.withStatus(status);
curator.writeChangeRequest(updatedVcmr);
if (nodes.keySet().size() == 1)
approveChangeRequest(updatedVcmr);
});
}
});
return 1.0;
}
/**
* Status is based on:
* 1. Whether the source has reportedly closed the request
* 2. Whether any host requires operator action
* 3. Whether any host is pending/started/finished retirement
*/
private Status getStatus(List<HostAction> nextActions, VespaChangeRequest changeRequest) {
if (changeRequest.getChangeRequestSource().isClosed()) {
return Status.COMPLETED;
}
var byActionState = nextActions.stream().collect(Collectors.groupingBy(HostAction::getState, Collectors.counting()));
if (byActionState.getOrDefault(State.REQUIRES_OPERATOR_ACTION, 0L) > 0) {
return Status.REQUIRES_OPERATOR_ACTION;
}
if (byActionState.getOrDefault(State.OUT_OF_SYNC, 0L) > 0) {
return Status.OUT_OF_SYNC;
}
if (byActionState.getOrDefault(State.RETIRING, 0L) > 0) {
return Status.IN_PROGRESS;
}
if (Set.of(State.RETIRED, State.NONE).containsAll(byActionState.keySet())) {
return Status.READY;
}
if (byActionState.getOrDefault(State.PENDING_RETIREMENT, 0L) > 0) {
return Status.PENDING_ACTION;
}
return Status.NOOP;
}
private List<HostAction> getNextActions(Map<ZoneId, List<Node>> nodesByZone, VespaChangeRequest changeRequest) {
return nodesByZone.entrySet()
.stream()
.flatMap(entry -> {
var zone = entry.getKey();
var nodes = entry.getValue();
if (nodes.isEmpty()) {
return Stream.empty();
}
var spareCapacity = hasSpareCapacity(zone, nodes);
return nodes.stream().map(node -> nextAction(zone, node, changeRequest, spareCapacity));
}).collect(Collectors.toList());
}
// Get the superset of impacted hosts by looking at impacted switches
private Map<ZoneId, List<Node>> impactedNodes(Map<ZoneId, List<Node>> nodesByZone, VespaChangeRequest changeRequest) {
return nodesByZone.entrySet()
.stream()
.filter(entry -> entry.getValue().stream().anyMatch(isImpacted(changeRequest))) // Skip zones without impacted nodes
.collect(Collectors.toMap(
Map.Entry::getKey,
entry -> entry.getValue().stream().filter(isImpacted(changeRequest)).collect(Collectors.toList())
));
}
private Optional<HostAction> getPreviousAction(Node node, VespaChangeRequest changeRequest) {
return changeRequest.getHostActionPlan()
.stream()
.filter(hostAction -> hostAction.getHostname().equals(node.hostname().value()))
.findFirst();
}
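    // Decides the next action for a single impacted node, based on the change-request source state,
    // the previously recorded host action, the node's current state in the node repository, and
    // whether the zone has spare capacity.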
private HostAction nextAction(ZoneId zoneId, Node node, VespaChangeRequest changeRequest, boolean spareCapacity) {
var hostAction = getPreviousAction(node, changeRequest)
.orElse(new HostAction(node.hostname().value(), State.NONE, Instant.now()));
if (changeRequest.getChangeRequestSource().isClosed()) {
LOG.fine(() -> changeRequest.getChangeRequestSource().getId() + " is closed, recycling " + node.hostname());
recycleNode(zoneId, node, hostAction);
removeReport(zoneId, changeRequest, node);
return hostAction.withState(State.COMPLETE);
}
if (isLowImpact(changeRequest))
return hostAction;
addReport(zoneId, changeRequest, node);
if (isOutOfSync(node, hostAction))
return hostAction.withState(State.OUT_OF_SYNC);
if (isPostponed(changeRequest, hostAction)) {
LOG.fine(() -> changeRequest.getChangeRequestSource().getId() + " is postponed, recycling " + node.hostname());
recycleNode(zoneId, node, hostAction);
return hostAction.withState(State.PENDING_RETIREMENT);
}
if (node.type() != NodeType.host || !spareCapacity) {
return hostAction.withState(State.REQUIRES_OPERATOR_ACTION);
}
if (shouldRetire(changeRequest, hostAction)) {
if (!node.wantToRetire()) {
LOG.info(Text.format("Retiring %s due to %s", node.hostname().value(), changeRequest.getChangeRequestSource().getId()));
// TODO: Remove try/catch once retirement is stabilized
try {
setWantToRetire(zoneId, node, true);
} catch (Exception e) {
LOG.warning("Failed to retire host " + node.hostname() + ": " + Exceptions.toMessageString(e));
// Check if retirement actually failed
if (!nodeRepository.getNode(zoneId, node.hostname().value()).wantToRetire()) {
return hostAction;
}
}
}
return hostAction.withState(State.RETIRING);
}
if (hasRetired(node, hostAction)) {
LOG.fine(() -> node.hostname() + " has retired");
return hostAction.withState(State.RETIRED);
}
if (pendingRetirement(node, hostAction)) {
LOG.fine(() -> node.hostname() + " is pending retirement");
return hostAction.withState(State.PENDING_RETIREMENT);
}
if (isFailed(node)) {
return hostAction.withState(State.NONE);
}
return hostAction;
}
// Dirty host iff the parked host was retired by this maintainer
private void recycleNode(ZoneId zoneId, Node node, HostAction hostAction) {
if (hostAction.getState() == State.RETIRED &&
node.state() == Node.State.parked) {
LOG.info("Setting " + node.hostname() + " to dirty");
nodeRepository.setState(zoneId, Node.State.dirty, node.hostname().value());
}
if (hostAction.getState() == State.RETIRING && node.wantToRetire()) {
try {
setWantToRetire(zoneId, node, false);
} catch (Exception ignored) {}
}
}
private boolean isPostponed(VespaChangeRequest changeRequest, HostAction action) {
return List.of(State.RETIRED, State.RETIRING).contains(action.getState()) &&
changeRequest.getChangeRequestSource().getPlannedStartTime()
.minus(ALLOWED_POSTPONEMENT_TIME)
.isAfter(ZonedDateTime.now());
}
private boolean shouldRetire(VespaChangeRequest changeRequest, HostAction action) {
return action.getState() == State.PENDING_RETIREMENT &&
getRetirementStartTime(changeRequest.getChangeRequestSource().getPlannedStartTime())
.isBefore(ZonedDateTime.now());
}
private boolean hasRetired(Node node, HostAction hostAction) {
return List.of(State.RETIRING, State.REQUIRES_OPERATOR_ACTION).contains(hostAction.getState()) &&
node.state() == Node.State.parked;
}
private boolean pendingRetirement(Node node, HostAction action) {
return List.of(State.NONE, State.REQUIRES_OPERATOR_ACTION).contains(action.getState())
&& node.state() == Node.State.active;
}
// Determines if node state is unexpected based on previous action taken
private boolean isOutOfSync(Node node, HostAction action) {
return action.getState() == State.RETIRED && node.state() != Node.State.parked ||
action.getState() == State.RETIRING && !node.wantToRetire();
}
private boolean isFailed(Node node) {
return node.state() == Node.State.failed ||
node.state() == Node.State.breakfixed;
}
private Map<ZoneId, List<Node>> nodesByZone() {
return controller().zoneRegistry()
.zones()
.reachable()
.in(Environment.prod)
.ids()
.stream()
.collect(Collectors.toMap(
zone -> zone,
zone -> nodeRepository.list(zone, NodeFilter.all())
));
}
private Predicate<Node> isImpacted(VespaChangeRequest changeRequest) {
return node -> changeRequest.getImpactedHosts().contains(node.hostname().value()) ||
node.switchHostname()
.map(switchHostname -> changeRequest.getImpactedSwitches().contains(switchHostname))
.orElse(false);
}
private Predicate<VespaChangeRequest> shouldUpdate() {
return changeRequest -> changeRequest.getStatus() != Status.COMPLETED;
}
private boolean isLowImpact(VespaChangeRequest changeRequest) {
return !List.of(Impact.HIGH, Impact.VERY_HIGH)
.contains(changeRequest.getImpact());
}
private boolean hasSpareCapacity(ZoneId zoneId, List<Node> nodes) {
var tenantHosts = nodes.stream()
.filter(node -> node.type() == NodeType.host)
.map(Node::hostname)
.collect(Collectors.toList());
return tenantHosts.isEmpty() ||
nodeRepository.isReplaceable(zoneId, tenantHosts);
}
private void setWantToRetire(ZoneId zoneId, Node node, boolean wantToRetire) {
nodeRepository.retire(zoneId, node.hostname().value(), wantToRetire, false);
}
private void approveChangeRequest(VespaChangeRequest changeRequest) {
if (!system.equals(SystemName.main))
return;
if (changeRequest.getStatus() == Status.REQUIRES_OPERATOR_ACTION)
return;
if (changeRequest.getApproval() != ChangeRequest.Approval.REQUESTED)
return;
LOG.info("Approving " + changeRequest.getChangeRequestSource().getId());
changeRequestClient.approveChangeRequest(changeRequest);
}
private void removeReport(ZoneId zoneId, VespaChangeRequest changeRequest, Node node) {
var report = VcmrReport.fromReports(node.reports());
if (report.removeVcmr(changeRequest.getChangeRequestSource().getId())) {
updateReport(zoneId, node, report);
}
}
private void addReport(ZoneId zoneId, VespaChangeRequest changeRequest, Node node) {
var report = VcmrReport.fromReports(node.reports());
var source = changeRequest.getChangeRequestSource();
if (report.addVcmr(source.getId(), source.getPlannedStartTime(), source.getPlannedEndTime())) {
updateReport(zoneId, node, report);
}
}
private void updateReport(ZoneId zoneId, Node node, VcmrReport report) {
LOG.fine(() -> Text.format("Updating report for %s: %s", node.hostname(), report));
nodeRepository.updateReports(zoneId, node.hostname().value(), report.toNodeReports());
}
// Calculate wanted retirement start time, ignoring weekends
// protected for testing
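    // Example: with DAYS_TO_RETIRE = 2, a planned start on a Monday gives the preceding Thursday,
    // since Saturday and Sunday are not counted.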
protected ZonedDateTime getRetirementStartTime(ZonedDateTime plannedStartTime) {
var time = plannedStartTime;
var days = 0;
while (days < DAYS_TO_RETIRE) {
time = time.minusDays(1);
if (time.getDayOfWeek().getValue() < 6) days++;
}
return time;
}
}
| controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/VcmrMaintainer.java | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.controller.maintenance;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.NodeType;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.zone.ZoneId;
import com.yahoo.text.Text;
import com.yahoo.vespa.hosted.controller.Controller;
import com.yahoo.vespa.hosted.controller.api.integration.configserver.Node;
import com.yahoo.vespa.hosted.controller.api.integration.configserver.NodeFilter;
import com.yahoo.vespa.hosted.controller.api.integration.configserver.NodeRepository;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.ChangeRequest;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.ChangeRequest.Impact;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.ChangeRequestClient;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.HostAction;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.HostAction.State;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.VcmrReport;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.VespaChangeRequest;
import com.yahoo.vespa.hosted.controller.api.integration.vcmr.VespaChangeRequest.Status;
import com.yahoo.vespa.hosted.controller.persistence.CuratorDb;
import com.yahoo.yolean.Exceptions;
import java.time.Duration;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
*
* Maintains status and execution of Vespa CMRs.
*
* Currently this retires all affected tenant hosts if zone capacity allows it.
*
* @author olaa
*/
public class VcmrMaintainer extends ControllerMaintainer {
private static final Logger LOG = Logger.getLogger(VcmrMaintainer.class.getName());
private static final int DAYS_TO_RETIRE = 2;
private static final Duration ALLOWED_POSTPONEMENT_TIME = Duration.ofDays(7);
private final CuratorDb curator;
private final NodeRepository nodeRepository;
private final ChangeRequestClient changeRequestClient;
private final SystemName system;
public VcmrMaintainer(Controller controller, Duration interval) {
super(controller, interval, null, SystemName.allOf(Predicate.not(SystemName::isPublic)));
this.curator = controller.curator();
this.nodeRepository = controller.serviceRegistry().configServer().nodeRepository();
this.changeRequestClient = controller.serviceRegistry().changeRequestClient();
this.system = controller.system();
}
@Override
protected double maintain() {
var changeRequests = curator.readChangeRequests()
.stream()
.filter(shouldUpdate())
.collect(Collectors.toList());
var nodesByZone = nodesByZone();
changeRequests.forEach(changeRequest -> {
var nodes = impactedNodes(nodesByZone, changeRequest);
var nextActions = getNextActions(nodes, changeRequest);
var status = getStatus(nextActions, changeRequest);
try (var lock = curator.lockChangeRequests()) {
// Read the vcmr again, in case the source status has been updated
curator.readChangeRequest(changeRequest.getId())
.ifPresent(vcmr -> {
var updatedVcmr = vcmr.withActionPlan(nextActions)
.withStatus(status);
curator.writeChangeRequest(updatedVcmr);
if (nodes.keySet().size() == 1)
approveChangeRequest(updatedVcmr);
});
}
});
return 1.0;
}
/**
* Status is based on:
* 1. Whether the source has reportedly closed the request
* 2. Whether any host requires operator action
* 3. Whether any host is pending/started/finished retirement
*/
private Status getStatus(List<HostAction> nextActions, VespaChangeRequest changeRequest) {
if (changeRequest.getChangeRequestSource().isClosed()) {
return Status.COMPLETED;
}
var byActionState = nextActions.stream().collect(Collectors.groupingBy(HostAction::getState, Collectors.counting()));
if (byActionState.getOrDefault(State.REQUIRES_OPERATOR_ACTION, 0L) > 0) {
return Status.REQUIRES_OPERATOR_ACTION;
}
if (byActionState.getOrDefault(State.OUT_OF_SYNC, 0L) > 0) {
return Status.OUT_OF_SYNC;
}
if (byActionState.getOrDefault(State.RETIRING, 0L) > 0) {
return Status.IN_PROGRESS;
}
if (Set.of(State.RETIRED, State.NONE).containsAll(byActionState.keySet())) {
return Status.READY;
}
if (byActionState.getOrDefault(State.PENDING_RETIREMENT, 0L) > 0) {
return Status.PENDING_ACTION;
}
return Status.NOOP;
}
private List<HostAction> getNextActions(Map<ZoneId, List<Node>> nodesByZone, VespaChangeRequest changeRequest) {
return nodesByZone.entrySet()
.stream()
.flatMap(entry -> {
var zone = entry.getKey();
var nodes = entry.getValue();
if (nodes.isEmpty()) {
return Stream.empty();
}
var spareCapacity = hasSpareCapacity(zone, nodes);
return nodes.stream().map(node -> nextAction(zone, node, changeRequest, spareCapacity));
}).collect(Collectors.toList());
}
// Get the superset of impacted hosts by looking at impacted switches
private Map<ZoneId, List<Node>> impactedNodes(Map<ZoneId, List<Node>> nodesByZone, VespaChangeRequest changeRequest) {
return nodesByZone.entrySet()
.stream()
.filter(entry -> entry.getValue().stream().anyMatch(isImpacted(changeRequest))) // Skip zones without impacted nodes
.collect(Collectors.toMap(
Map.Entry::getKey,
entry -> entry.getValue().stream().filter(isImpacted(changeRequest)).collect(Collectors.toList())
));
}
private Optional<HostAction> getPreviousAction(Node node, VespaChangeRequest changeRequest) {
return changeRequest.getHostActionPlan()
.stream()
.filter(hostAction -> hostAction.getHostname().equals(node.hostname().value()))
.findFirst();
}
private HostAction nextAction(ZoneId zoneId, Node node, VespaChangeRequest changeRequest, boolean spareCapacity) {
var hostAction = getPreviousAction(node, changeRequest)
.orElse(new HostAction(node.hostname().value(), State.NONE, Instant.now()));
if (changeRequest.getChangeRequestSource().isClosed()) {
LOG.fine(() -> changeRequest.getChangeRequestSource().getId() + " is closed, recycling " + node.hostname());
recycleNode(zoneId, node, hostAction);
removeReport(zoneId, changeRequest, node);
return hostAction.withState(State.COMPLETE);
}
if (isLowImpact(changeRequest))
return hostAction;
addReport(zoneId, changeRequest, node);
if (isOutOfSync(node, hostAction))
return hostAction.withState(State.OUT_OF_SYNC);
if (isPostponed(changeRequest, hostAction)) {
LOG.fine(() -> changeRequest.getChangeRequestSource().getId() + " is postponed, recycling " + node.hostname());
recycleNode(zoneId, node, hostAction);
return hostAction.withState(State.PENDING_RETIREMENT);
}
if (node.type() != NodeType.host || !spareCapacity) {
return hostAction.withState(State.REQUIRES_OPERATOR_ACTION);
}
if (shouldRetire(changeRequest, hostAction)) {
if (!node.wantToRetire()) {
LOG.info(Text.format("Retiring %s due to %s", node.hostname().value(), changeRequest.getChangeRequestSource().getId()));
// TODO: Remove try/catch once retirement is stabilized
try {
setWantToRetire(zoneId, node, true);
} catch (Exception e) {
LOG.warning("Failed to retire host " + node.hostname() + ": " + Exceptions.toMessageString(e));
// Check if retirement actually failed
if (!nodeRepository.getNode(zoneId, node.hostname().value()).wantToRetire()) {
return hostAction;
}
}
}
return hostAction.withState(State.RETIRING);
}
if (hasRetired(node, hostAction)) {
LOG.fine(() -> node.hostname() + " has retired");
return hostAction.withState(State.RETIRED);
}
if (pendingRetirement(node, hostAction)) {
LOG.fine(() -> node.hostname() + " is pending retirement");
return hostAction.withState(State.PENDING_RETIREMENT);
}
return hostAction;
}
// Dirty host iff the parked host was retired by this maintainer
private void recycleNode(ZoneId zoneId, Node node, HostAction hostAction) {
if (hostAction.getState() == State.RETIRED &&
node.state() == Node.State.parked) {
LOG.info("Setting " + node.hostname() + " to dirty");
nodeRepository.setState(zoneId, Node.State.dirty, node.hostname().value());
}
if (hostAction.getState() == State.RETIRING && node.wantToRetire()) {
try {
setWantToRetire(zoneId, node, false);
} catch (Exception ignored) {}
}
}
private boolean isPostponed(VespaChangeRequest changeRequest, HostAction action) {
return List.of(State.RETIRED, State.RETIRING).contains(action.getState()) &&
changeRequest.getChangeRequestSource().getPlannedStartTime()
.minus(ALLOWED_POSTPONEMENT_TIME)
.isAfter(ZonedDateTime.now());
}
private boolean shouldRetire(VespaChangeRequest changeRequest, HostAction action) {
return action.getState() == State.PENDING_RETIREMENT &&
getRetirementStartTime(changeRequest.getChangeRequestSource().getPlannedStartTime())
.isBefore(ZonedDateTime.now());
}
private boolean hasRetired(Node node, HostAction hostAction) {
return List.of(State.RETIRING, State.REQUIRES_OPERATOR_ACTION).contains(hostAction.getState()) &&
node.state() == Node.State.parked;
}
private boolean pendingRetirement(Node node, HostAction action) {
return List.of(State.NONE, State.REQUIRES_OPERATOR_ACTION).contains(action.getState())
&& node.state() == Node.State.active;
}
// Determines if node state is unexpected based on previous action taken
private boolean isOutOfSync(Node node, HostAction action) {
return action.getState() == State.RETIRED && node.state() != Node.State.parked ||
action.getState() == State.RETIRING && !node.wantToRetire();
}
private Map<ZoneId, List<Node>> nodesByZone() {
return controller().zoneRegistry()
.zones()
.reachable()
.in(Environment.prod)
.ids()
.stream()
.collect(Collectors.toMap(
zone -> zone,
zone -> nodeRepository.list(zone, NodeFilter.all())
));
}
private Predicate<Node> isImpacted(VespaChangeRequest changeRequest) {
return node -> changeRequest.getImpactedHosts().contains(node.hostname().value()) ||
node.switchHostname()
.map(switchHostname -> changeRequest.getImpactedSwitches().contains(switchHostname))
.orElse(false);
}
private Predicate<VespaChangeRequest> shouldUpdate() {
return changeRequest -> changeRequest.getStatus() != Status.COMPLETED;
}
private boolean isLowImpact(VespaChangeRequest changeRequest) {
return !List.of(Impact.HIGH, Impact.VERY_HIGH)
.contains(changeRequest.getImpact());
}
private boolean hasSpareCapacity(ZoneId zoneId, List<Node> nodes) {
var tenantHosts = nodes.stream()
.filter(node -> node.type() == NodeType.host)
.map(Node::hostname)
.collect(Collectors.toList());
return tenantHosts.isEmpty() ||
nodeRepository.isReplaceable(zoneId, tenantHosts);
}
private void setWantToRetire(ZoneId zoneId, Node node, boolean wantToRetire) {
nodeRepository.retire(zoneId, node.hostname().value(), wantToRetire, false);
}
private void approveChangeRequest(VespaChangeRequest changeRequest) {
if (!system.equals(SystemName.main))
return;
if (changeRequest.getStatus() == Status.REQUIRES_OPERATOR_ACTION)
return;
if (changeRequest.getApproval() != ChangeRequest.Approval.REQUESTED)
return;
LOG.info("Approving " + changeRequest.getChangeRequestSource().getId());
changeRequestClient.approveChangeRequest(changeRequest);
}
private void removeReport(ZoneId zoneId, VespaChangeRequest changeRequest, Node node) {
var report = VcmrReport.fromReports(node.reports());
if (report.removeVcmr(changeRequest.getChangeRequestSource().getId())) {
updateReport(zoneId, node, report);
}
}
private void addReport(ZoneId zoneId, VespaChangeRequest changeRequest, Node node) {
var report = VcmrReport.fromReports(node.reports());
var source = changeRequest.getChangeRequestSource();
if (report.addVcmr(source.getId(), source.getPlannedStartTime(), source.getPlannedEndTime())) {
updateReport(zoneId, node, report);
}
}
private void updateReport(ZoneId zoneId, Node node, VcmrReport report) {
LOG.fine(() -> Text.format("Updating report for %s: %s", node.hostname(), report));
nodeRepository.updateReports(zoneId, node.hostname().value(), report.toNodeReports());
}
// Calculate wanted retirement start time, ignoring weekends
// protected for testing
protected ZonedDateTime getRetirementStartTime(ZonedDateTime plannedStartTime) {
var time = plannedStartTime;
var days = 0;
while (days < DAYS_TO_RETIRE) {
time = time.minusDays(1);
if (time.getDayOfWeek().getValue() < 6) days++;
}
return time;
}
}
| Failed and breakfixed hosts are always noop in CMRs
| controller-server/src/main/java/com/yahoo/vespa/hosted/controller/maintenance/VcmrMaintainer.java | Failed and breakfixed hosts are always noop in CMRs |
|
Java | apache-2.0 | 493d7eda8be3d9bcbdd40581384c09e2ce8f6ddf | 0 | wso2/analytics-apim,wso2/analytics-apim,wso2/analytics-apim | /*
* Copyright (c) 2020, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.analytics.apim.rest.api.report.reportgen.util;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.edit.PDPageContentStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
import org.apache.pdfbox.pdmodel.graphics.xobject.PDJpeg;
import org.wso2.analytics.apim.rest.api.report.impl.ReportApiServiceImpl;
import org.wso2.analytics.apim.rest.api.report.reportgen.model.RowEntry;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* Util class for report generation.
*/
public class ReportGeneratorUtil {
private static final float ROW_HEIGHT = 25;
private static final float CELL_PADDING = 5;
private static final float CELL_MARGIN = 40; // margin on left side;
private static final float TABLE_WIDTH = 500;
private static final float TABLE_TOP_Y = 700;
// Font configuration
private static final PDFont TEXT_FONT = PDType1Font.HELVETICA;
private static final float FONT_SIZE = 9;
private static final float RECORD_COUNT_PER_PAGE = 25;
/**
* Get List of integers with the number of records in each page.
* @param numberOfRows total number of rows across the document.
* @return list of integers with the number of records. Each index represents the page number - 1.
*/
public static List<Integer> getRecordsPerPage(int numberOfRows) {
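        // Example: 60 rows with 25 records per page yields [25, 25, 10].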
int numOfPages = (int) Math.ceil(numberOfRows / RECORD_COUNT_PER_PAGE);
List<Integer> recordCountPerPage = new ArrayList<>();
int remainingRows = numberOfRows;
if (numberOfRows < RECORD_COUNT_PER_PAGE) {
recordCountPerPage.add(numberOfRows);
return recordCountPerPage;
} else {
for (int i = 0; i < numOfPages; i++) {
if (remainingRows >= RECORD_COUNT_PER_PAGE) {
recordCountPerPage.add((int) RECORD_COUNT_PER_PAGE);
remainingRows -= RECORD_COUNT_PER_PAGE;
} else {
recordCountPerPage.add(remainingRows);
}
}
}
return recordCountPerPage;
}
/**
* Inserts page number onto the bottom center of the page.
* @param contentStream content stream of the page.
* @param pageNumber page number.
* @throws IOException
*/
public static void insertPageNumber(PDPageContentStream contentStream, int pageNumber) throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, FONT_SIZE);
contentStream.beginText();
contentStream.moveTextPositionByAmount((PDPage.PAGE_SIZE_A4.getUpperRightX() / 2),
(PDPage.PAGE_SIZE_A4.getLowerLeftY()) + ROW_HEIGHT);
contentStream.drawString(pageNumber + "");
contentStream.endText();
}
/**
* Inserts logo onto the top right of the page.
* @param document
* @param contentStream
* @throws IOException
*/
public static void insertLogo(PDDocument document, PDPageContentStream contentStream) throws IOException {
InputStream in = ReportApiServiceImpl.class.getResourceAsStream("/wso2-logo.jpg");
PDJpeg img = new PDJpeg(document, in);
contentStream.drawImage(img, 375, 755);
}
/**
* Inserts title to the page.
* @param contentStream
* @param title
* @throws IOException
*/
public static void insertReportTitleToHeader(PDPageContentStream contentStream, String title) throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, 16);
writeContent(contentStream, CELL_MARGIN, 790, title);
}
/**
* Inserts report period to the page.
* @param contentStream content stream of the page.
* @param period the time duration which should be printed below the title.
* @throws IOException
*/
public static void insertReportTimePeriodToHeader(PDPageContentStream contentStream, String period)
throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, 14);
writeContent(contentStream, CELL_MARGIN, 770, period);
}
/**
* Inserts total request count of the report on the header.
* @param contentStream content stream of the page.
* @param totalRequestCount total aggregated count.
* @throws IOException
*/
public static void insertTotalRequestCountToHeader(PDPageContentStream contentStream, Long totalRequestCount)
throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, FONT_SIZE);
writeContent(contentStream, CELL_MARGIN, 735, "Total Request count : " +
totalRequestCount.toString());
}
/**
* Inserts report generated time.
* @param contentStream content stream of the page.
* @throws IOException
*/
public static void insertReportGeneratedTimeToHeader(PDPageContentStream contentStream) throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, FONT_SIZE);
writeContent(contentStream, CELL_MARGIN, 750, "Report generated on : " + new Date().toString());
}
/**
     * Writes a single text entry at the given position, truncating entries of 20 or more characters.
* @param contentStream content stream of the page.
* @param positionX x-axis position.
* @param positionY y-axis position.
* @param text the content to write.
* @throws IOException
*/
public static void writeContent(PDPageContentStream contentStream, float positionX, float positionY, String text)
throws IOException {
contentStream.beginText();
contentStream.moveTextPositionByAmount(positionX, positionY);
        // trim entries of 20 or more characters and append an ellipsis
if (text != null && text.length() >= 20) {
text = text.substring(0, 20);
text = text + "...";
}
contentStream.drawString(text != null ? text : "");
contentStream.endText();
}
/**
* Prints a table with column headers and data.
* @param columnHeaders the table column headers.
* @param columnWidths widths of each column.
* @param document the document.
* @param pageMap page map with each page object stored against each page index.
* @param rowEntries list of rows.
* @throws IOException
*/
public static void writeRowsContent(String[] columnHeaders, float[] columnWidths, PDDocument document, Map<Integer,
PDPage> pageMap, List<RowEntry> rowEntries) throws IOException {
float startX = CELL_MARGIN + CELL_PADDING; // space between entry and the column line
float startY = TABLE_TOP_Y - (ROW_HEIGHT / 2)
- ((TEXT_FONT.getFontDescriptor().getFontBoundingBox().getHeight() / 1000 * FONT_SIZE) / 4);
PDPageContentStream contentStream = new PDPageContentStream(document, pageMap.get(1), true, false);
// write table column headers
writeColumnHeader(contentStream, columnWidths, startX, startY, columnHeaders);
startY -= ROW_HEIGHT;
startX = CELL_MARGIN + CELL_PADDING;
int currentPageNum = 1;
int rowNum = 0;
// write content
for (RowEntry entry : rowEntries) {
rowNum += 1;
if (rowNum > RECORD_COUNT_PER_PAGE) {
contentStream.close();
currentPageNum += 1;
contentStream = new PDPageContentStream(document, pageMap.get(currentPageNum), true, false);
contentStream.setFont(TEXT_FONT, FONT_SIZE);
startY = TABLE_TOP_Y - (ROW_HEIGHT / 2)
- ((TEXT_FONT.getFontDescriptor().getFontBoundingBox().getHeight() / 1000 * FONT_SIZE) / 4);
startX = CELL_MARGIN + CELL_PADDING;
rowNum = 1;
}
writeToRow(contentStream, columnWidths, startX, startY, entry);
startY -= ROW_HEIGHT;
startX = CELL_MARGIN + CELL_PADDING;
}
contentStream.close();
}
/**
* Writes a row.
* @param contentStream content stream of the page.
* @param columnWidths widths of each column.
* @param positionX x-axis position
* @param positionY y-axis position
* @param entry row data.
* @throws IOException
*/
public static void writeToRow(PDPageContentStream contentStream, float[] columnWidths, float positionX,
float positionY, RowEntry entry)
throws IOException {
for (int i = 0; i < columnWidths.length; i++) {
writeContent(contentStream, positionX, positionY, entry.getEntries().get(i));
positionX += columnWidths[i];
}
contentStream.close();
}
/**
* Writes the column header.
* @param contentStream content stream of the page.
* @param columnWidths widths of each column.
* @param positionX x-axis position
* @param positionY y-axis position
* @param content data to write in column header.
* @throws IOException
*/
public static void writeColumnHeader(PDPageContentStream contentStream, float[] columnWidths, float positionX,
float positionY, String[] content)
throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, FONT_SIZE);
for (int i = 0; i < columnWidths.length; i++) {
writeContent(contentStream, positionX, positionY, content[i]);
positionX += columnWidths[i];
}
contentStream.setFont(TEXT_FONT, FONT_SIZE);
contentStream.close();
}
/**
* Draws a table.
* @param document document to draw the table.
* @param pageMap map of page objects against page numbers.
* @param recordsPerPageList a list of integers with number of records for each page.
* @param columnWidths widths of the columns.
* @param numberOfRows total number of rows.
* @throws IOException
*/
public static void drawTableGrid(PDDocument document, Map<Integer,
PDPage> pageMap, List<Integer> recordsPerPageList, float[] columnWidths, int numberOfRows)
throws IOException {
float nextY = TABLE_TOP_Y;
// draw horizontal lines
int currentPageNum = 1;
PDPageContentStream contentStream = new PDPageContentStream(document, pageMap.get(currentPageNum), true,
false);
int rowNum = 0;
for (int i = 0; i <= numberOfRows + 1; i++) {
contentStream.drawLine(CELL_MARGIN, nextY, CELL_MARGIN + TABLE_WIDTH, nextY);
nextY -= ROW_HEIGHT;
if (rowNum > RECORD_COUNT_PER_PAGE) {
contentStream.close();
currentPageNum++;
contentStream = new PDPageContentStream(document, pageMap.get(currentPageNum), true, false);
insertPageNumber(contentStream, currentPageNum);
insertLogo(document, contentStream);
nextY = TABLE_TOP_Y;
rowNum = 0;
numberOfRows++; // at each new page add one more horizontal line
}
rowNum++;
}
contentStream.close();
// draw vertical lines
for (int k = 1; k <= pageMap.size(); k++) {
float tableYLength = (ROW_HEIGHT * (recordsPerPageList.get(k - 1)));
float tableBottomY = TABLE_TOP_Y - tableYLength;
if (k == 1) {
tableBottomY -= ROW_HEIGHT;
}
float nextX = CELL_MARGIN;
contentStream = new PDPageContentStream(document, pageMap.get(k), true, false);
for (float columnWidth : columnWidths) {
contentStream.drawLine(nextX, TABLE_TOP_Y, nextX, tableBottomY);
nextX += columnWidth;
}
contentStream.drawLine(CELL_MARGIN + TABLE_WIDTH, TABLE_TOP_Y, CELL_MARGIN + TABLE_WIDTH, tableBottomY);
contentStream.close();
}
}
/**
* Returns the number of pages in the document.
* @param numberOfRows number of records.
* @return number of pages in the document.
*/
public static int getNumberOfPages(int numberOfRows) {
return (int) Math.ceil(numberOfRows / RECORD_COUNT_PER_PAGE);
}
}
| components/org.wso2.analytics.apim.rest.api.report/src/main/java/org/wso2/analytics/apim/rest/api/report/reportgen/util/ReportGeneratorUtil.java | /*
* Copyright (c) 2020, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.analytics.apim.rest.api.report.reportgen.util;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.edit.PDPageContentStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
import org.apache.pdfbox.pdmodel.graphics.xobject.PDJpeg;
import org.wso2.analytics.apim.rest.api.report.impl.ReportApiServiceImpl;
import org.wso2.analytics.apim.rest.api.report.reportgen.model.RowEntry;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* Util class for report generation.
*/
public class ReportGeneratorUtil {
private static final float ROW_HEIGHT = 25;
private static final float CELL_PADDING = 10;
private static final float CELL_MARGIN = 40; // margin on left side;
private static final float TABLE_WIDTH = 500;
private static final float TABLE_TOP_Y = 700;
// Font configuration
private static final PDFont TEXT_FONT = PDType1Font.HELVETICA;
private static final float FONT_SIZE = 9;
private static final float RECORD_COUNT_PER_PAGE = 25;
/**
* Get List of integers with the number of records in each page.
* @param numberOfRows total number of rows across the document.
* @return list of integers with the number of records. Each index represents the page number - 1.
*/
public static List<Integer> getRecordsPerPage(int numberOfRows) {
int numOfPages = (int) Math.ceil(numberOfRows / RECORD_COUNT_PER_PAGE);
List<Integer> recordCountPerPage = new ArrayList<>();
int remainingRows = numberOfRows;
if (numberOfRows < RECORD_COUNT_PER_PAGE) {
recordCountPerPage.add(numberOfRows);
return recordCountPerPage;
} else {
for (int i = 0; i < numOfPages; i++) {
if (remainingRows >= RECORD_COUNT_PER_PAGE) {
recordCountPerPage.add((int) RECORD_COUNT_PER_PAGE);
remainingRows -= RECORD_COUNT_PER_PAGE;
} else {
recordCountPerPage.add(remainingRows);
}
}
}
return recordCountPerPage;
}
/**
* Inserts page number onto the bottom center of the page.
* @param contentStream content stream of the page.
* @param pageNumber page number.
* @throws IOException
*/
public static void insertPageNumber(PDPageContentStream contentStream, int pageNumber) throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, FONT_SIZE);
contentStream.beginText();
contentStream.moveTextPositionByAmount((PDPage.PAGE_SIZE_A4.getUpperRightX() / 2),
(PDPage.PAGE_SIZE_A4.getLowerLeftY()) + ROW_HEIGHT);
contentStream.drawString(pageNumber + "");
contentStream.endText();
}
/**
* Inserts logo onto the top right of the page.
* @param document
* @param contentStream
* @throws IOException
*/
public static void insertLogo(PDDocument document, PDPageContentStream contentStream) throws IOException {
InputStream in = ReportApiServiceImpl.class.getResourceAsStream("/wso2-logo.jpg");
PDJpeg img = new PDJpeg(document, in);
contentStream.drawImage(img, 375, 755);
}
/**
* Inserts title to the page.
* @param contentStream
* @param title
* @throws IOException
*/
public static void insertReportTitleToHeader(PDPageContentStream contentStream, String title) throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, 16);
writeContent(contentStream, CELL_MARGIN, 790, title);
}
/**
* Inserts report period to the page.
* @param contentStream content stream of the page.
* @param period the time duration which should be printed below the title.
* @throws IOException
*/
public static void insertReportTimePeriodToHeader(PDPageContentStream contentStream, String period)
throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, 14);
writeContent(contentStream, CELL_MARGIN, 770, period);
}
/**
* Inserts total request count of the report on the header.
* @param contentStream content stream of the page.
* @param totalRequestCount total aggregated count.
* @throws IOException
*/
public static void insertTotalRequestCountToHeader(PDPageContentStream contentStream, Long totalRequestCount)
throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, FONT_SIZE);
writeContent(contentStream, CELL_MARGIN, 735, "Total Request count : " +
totalRequestCount.toString());
}
/**
* Inserts report generated time.
* @param contentStream content stream of the page.
* @throws IOException
*/
public static void insertReportGeneratedTimeToHeader(PDPageContentStream contentStream) throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, FONT_SIZE);
writeContent(contentStream, CELL_MARGIN, 750, "Report generated on : " + new Date().toString());
}
/**
     * Writes a single text entry at the given position.
* @param contentStream content stream of the page.
* @param positionX x-axis position.
* @param positionY y-axis position.
* @param text the content to write.
* @throws IOException
*/
public static void writeContent(PDPageContentStream contentStream, float positionX, float positionY, String text)
throws IOException {
contentStream.beginText();
contentStream.moveTextPositionByAmount(positionX, positionY);
contentStream.drawString(text != null ? text : "");
contentStream.endText();
}
/**
* Prints a table with column headers and data.
* @param columnHeaders the table column headers.
* @param columnWidths widths of each column.
* @param document the document.
* @param pageMap page map with each page object stored against each page index.
* @param rowEntries list of rows.
* @throws IOException
*/
public static void writeRowsContent(String[] columnHeaders, float[] columnWidths, PDDocument document, Map<Integer,
PDPage> pageMap, List<RowEntry> rowEntries) throws IOException {
float startX = CELL_MARGIN + CELL_PADDING; // space between entry and the column line
float startY = TABLE_TOP_Y - (ROW_HEIGHT / 2)
- ((TEXT_FONT.getFontDescriptor().getFontBoundingBox().getHeight() / 1000 * FONT_SIZE) / 4);
PDPageContentStream contentStream = new PDPageContentStream(document, pageMap.get(1), true, false);
// write table column headers
writeColumnHeader(contentStream, columnWidths, startX, startY, columnHeaders);
startY -= ROW_HEIGHT;
startX = CELL_MARGIN + CELL_PADDING;
int currentPageNum = 1;
int rowNum = 0;
// write content
for (RowEntry entry : rowEntries) {
rowNum += 1;
if (rowNum > RECORD_COUNT_PER_PAGE) {
contentStream.close();
currentPageNum += 1;
contentStream = new PDPageContentStream(document, pageMap.get(currentPageNum), true, false);
contentStream.setFont(TEXT_FONT, FONT_SIZE);
startY = TABLE_TOP_Y - (ROW_HEIGHT / 2)
- ((TEXT_FONT.getFontDescriptor().getFontBoundingBox().getHeight() / 1000 * FONT_SIZE) / 4);
startX = CELL_MARGIN + CELL_PADDING;
rowNum = 1;
}
writeToRow(contentStream, columnWidths, startX, startY, entry);
startY -= ROW_HEIGHT;
startX = CELL_MARGIN + CELL_PADDING;
}
contentStream.close();
}
/**
* Writes a row.
* @param contentStream content stream of the page.
* @param columnWidths widths of each column.
* @param positionX x-axis position
* @param positionY y-axis position
* @param entry row data.
* @throws IOException
*/
public static void writeToRow(PDPageContentStream contentStream, float[] columnWidths, float positionX,
float positionY, RowEntry entry)
throws IOException {
for (int i = 0; i < columnWidths.length; i++) {
writeContent(contentStream, positionX, positionY, entry.getEntries().get(i));
positionX += columnWidths[i];
}
contentStream.close();
}
/**
* Writes the column header.
* @param contentStream content stream of the page.
* @param columnWidths widths of each column.
* @param positionX x-axis position
* @param positionY y-axis position
* @param content data to write in column header.
* @throws IOException
*/
public static void writeColumnHeader(PDPageContentStream contentStream, float[] columnWidths, float positionX,
float positionY, String[] content)
throws IOException {
contentStream.setFont(PDType1Font.HELVETICA_BOLD, FONT_SIZE);
for (int i = 0; i < columnWidths.length; i++) {
writeContent(contentStream, positionX, positionY, content[i]);
positionX += columnWidths[i];
}
contentStream.setFont(TEXT_FONT, FONT_SIZE);
contentStream.close();
}
/**
* Draws a table.
* @param document document to draw the table.
* @param pageMap map of page objects against page numbers.
* @param recordsPerPageList a list of integers with number of records for each page.
* @param columnWidths widths of the columns.
* @param numberOfRows total number of rows.
* @throws IOException
*/
public static void drawTableGrid(PDDocument document, Map<Integer,
PDPage> pageMap, List<Integer> recordsPerPageList, float[] columnWidths, int numberOfRows)
throws IOException {
float nextY = TABLE_TOP_Y;
// draw horizontal lines
int currentPageNum = 1;
PDPageContentStream contentStream = new PDPageContentStream(document, pageMap.get(currentPageNum), true,
false);
int rowNum = 0;
for (int i = 0; i <= numberOfRows + 1; i++) {
contentStream.drawLine(CELL_MARGIN, nextY, CELL_MARGIN + TABLE_WIDTH, nextY);
nextY -= ROW_HEIGHT;
if (rowNum > RECORD_COUNT_PER_PAGE) {
contentStream.close();
currentPageNum++;
contentStream = new PDPageContentStream(document, pageMap.get(currentPageNum), true, false);
insertPageNumber(contentStream, currentPageNum);
insertLogo(document, contentStream);
nextY = TABLE_TOP_Y;
rowNum = 0;
numberOfRows++; // at each new page add one more horizontal line
}
rowNum++;
}
contentStream.close();
// draw vertical lines
for (int k = 1; k <= pageMap.size(); k++) {
float tableYLength = (ROW_HEIGHT * (recordsPerPageList.get(k - 1)));
float tableBottomY = TABLE_TOP_Y - tableYLength;
if (k == 1) {
tableBottomY -= ROW_HEIGHT;
}
float nextX = CELL_MARGIN;
contentStream = new PDPageContentStream(document, pageMap.get(k), true, false);
for (float columnWidth : columnWidths) {
contentStream.drawLine(nextX, TABLE_TOP_Y, nextX, tableBottomY);
nextX += columnWidth;
}
contentStream.drawLine(CELL_MARGIN + TABLE_WIDTH, TABLE_TOP_Y, CELL_MARGIN + TABLE_WIDTH, tableBottomY);
contentStream.close();
}
}
/**
* Returns the number of pages in the document.
* @param numberOfRows number of records.
* @return number of pages in the document.
*/
public static int getNumberOfPages(int numberOfRows) {
return (int) Math.ceil(numberOfRows / RECORD_COUNT_PER_PAGE);
}
}
| Fix pdf string length
| components/org.wso2.analytics.apim.rest.api.report/src/main/java/org/wso2/analytics/apim/rest/api/report/reportgen/util/ReportGeneratorUtil.java | Fix pdf string length |
|
Java | apache-2.0 | 4eda1c85ddba33335898203194a43e5ed0830043 | 0 | hs-web/hsweb-framework,hs-web/hsweb-framework,hs-web/hsweb-framework | package org.hswebframework.web.dao.mybatis.mapper;
import lombok.extern.slf4j.Slf4j;
import org.hswebframework.ezorm.core.param.Term;
import org.hswebframework.ezorm.rdb.meta.RDBColumnMetaData;
import org.hswebframework.ezorm.rdb.render.SqlAppender;
import org.hswebframework.ezorm.rdb.render.dialect.Dialect;
import org.hswebframework.ezorm.rdb.render.dialect.RenderPhase;
import org.hswebframework.ezorm.rdb.render.dialect.function.SqlFunction;
import org.hswebframework.ezorm.rdb.render.dialect.term.BoostTermTypeMapper;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author zhouhao
* @since 3.0.0-RC
*/
@Slf4j
public abstract class TreeStructureSqlTermCustomizer extends AbstractSqlTermCustomizer {
boolean not = false;
boolean parent = false;
    public TreeStructureSqlTermCustomizer(String termType, boolean not, boolean parent) {
        super(termType);
        this.not = not;
        this.parent = parent;
    }
protected abstract String getTableName();
protected abstract List<String> getTreePathByTerm(List<Object> termValue);
@Override
public SqlAppender accept(String wherePrefix, Term term, RDBColumnMetaData column, String tableAlias) {
ChangedTermValue termValue = createChangedTermValue(term);
Dialect dialect = column.getTableMetaData().getDatabaseMetaData().getDialect();
List<String> paths;
if (termValue.getOld() == termValue.getValue()) {
List<Object> value = BoostTermTypeMapper.convertList(column, termValue.getOld());
paths = getTreePathByTerm(value)
.stream()
.map(path -> path.concat("%"))
.collect(Collectors.toList());
termValue.setValue(paths);
} else {
paths = ((List) termValue.getValue());
}
SqlAppender termCondition = new SqlAppender();
termCondition.add(not ? "not " : "", "exists(select 1 from ", getTableName(), " tmp where tmp.u_id = ", createColumnName(column, tableAlias));
int len = paths.size();
if (len > 0) {
termCondition.add(" and (");
}
for (int i = 0; i < len; i++) {
if (i > 0) {
termCondition.addSpc(" or");
}
if (parent) {
SqlFunction function = dialect.getFunction(SqlFunction.concat);
String concat;
if (function == null) {
concat = getTableName() + ".path";
log.warn("数据库方言未支持concat函数,你可以调用Dialect.installFunction进行设置!");
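                    // Translation of the warning: the database dialect does not support the concat
                    // function; call Dialect.installFunction to configure one.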
} else {
concat = function.apply(SqlFunction.Param.of(RenderPhase.where, Arrays.asList("tmp.path", "'%'")));
}
termCondition.add("#{", wherePrefix, ".value.value[", i, "]}", " like ", concat);
} else {
termCondition.add("tmp.path like #{", wherePrefix, ".value.value[", i, "]}");
}
}
if (len > 0) {
termCondition.add(")");
}
termCondition.add(")");
return termCondition;
}
}
| hsweb-commons/hsweb-commons-dao/hsweb-commons-dao-mybatis/src/main/java/org/hswebframework/web/dao/mybatis/mapper/TreeStructureSqlTermCustomizer.java | package org.hswebframework.web.dao.mybatis.mapper;
import lombok.extern.slf4j.Slf4j;
import org.hswebframework.ezorm.core.param.Term;
import org.hswebframework.ezorm.rdb.meta.RDBColumnMetaData;
import org.hswebframework.ezorm.rdb.render.SqlAppender;
import org.hswebframework.ezorm.rdb.render.dialect.Dialect;
import org.hswebframework.ezorm.rdb.render.dialect.RenderPhase;
import org.hswebframework.ezorm.rdb.render.dialect.function.SqlFunction;
import org.hswebframework.ezorm.rdb.render.dialect.term.BoostTermTypeMapper;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author zhouhao
* @since 3.0.0-RC
*/
@Slf4j
public abstract class TreeStructureSqlTermCustomizer extends AbstractSqlTermCustomizer {
boolean not = false;
boolean parent = false;
public TreeStructureSqlTermCustomizer(String termType, boolean not, boolean parent) {
super(termType);
this.not = not;
}
protected abstract String getTableName();
protected abstract List<String> getTreePathByTerm(List<Object> termValue);
@Override
public SqlAppender accept(String wherePrefix, Term term, RDBColumnMetaData column, String tableAlias) {
ChangedTermValue termValue = createChangedTermValue(term);
Dialect dialect = column.getTableMetaData().getDatabaseMetaData().getDialect();
List<String> paths;
if (termValue.getOld() == termValue.getValue()) {
List<Object> value = BoostTermTypeMapper.convertList(column, termValue.getOld());
paths = getTreePathByTerm(value)
.stream()
.map(path -> path.concat("%"))
.collect(Collectors.toList());
termValue.setValue(paths);
} else {
paths = ((List) termValue.getValue());
}
SqlAppender termCondition = new SqlAppender();
termCondition.add(not ? "not " : "", "exists(select 1 from ", getTableName(), " tmp where tmp.u_id = ", createColumnName(column, tableAlias));
int len = paths.size();
if (len > 0) {
termCondition.add(" and (");
}
for (int i = 0; i < len; i++) {
if (i > 0) {
termCondition.addSpc("or");
}
if (parent) {
SqlFunction function = dialect.getFunction(SqlFunction.concat);
String concat;
if (function == null) {
concat = getTableName() + ".path";
log.warn("数据库方言未支持concat函数,你可以调用Dialect.installFunction进行设置!");
} else {
concat = function.apply(SqlFunction.Param.of(RenderPhase.where, Arrays.asList("tmp.path", "'%'")));
}
termCondition.add("#{", wherePrefix, ".value.value[", i, "]}", " like ", concat);
} else {
termCondition.add("tmp.path like #{", wherePrefix, ".value.value[", i, "]}");
}
}
if (len > 0) {
termCondition.add(")");
}
termCondition.add(")");
return termCondition;
}
}
| Optimize SQL
| hsweb-commons/hsweb-commons-dao/hsweb-commons-dao-mybatis/src/main/java/org/hswebframework/web/dao/mybatis/mapper/TreeStructureSqlTermCustomizer.java | Optimize SQL
|
Java | apache-2.0 | b6117f0735b93407052c45b4109e5311a040c232 | 0 | porcelli-forks/kie-wb-common,ederign/kie-wb-common,droolsjbpm/kie-wb-common,jhrcek/kie-wb-common,jhrcek/kie-wb-common,manstis/kie-wb-common,porcelli-forks/kie-wb-common,psiroky/kie-wb-common,romartin/kie-wb-common,romartin/kie-wb-common,scandihealth/kie-wb-common,jhrcek/kie-wb-common,dgutierr/kie-wb-common,manstis/kie-wb-common,scandihealth/kie-wb-common,ederign/kie-wb-common,nmirasch/kie-wb-common,droolsjbpm/kie-wb-common,ederign/kie-wb-common,porcelli-forks/kie-wb-common,cristianonicolai/kie-wb-common,manstis/kie-wb-common,dgutierr/kie-wb-common,romartin/kie-wb-common,jhrcek/kie-wb-common,romartin/kie-wb-common,manstis/kie-wb-common,porcelli-forks/kie-wb-common,cristianonicolai/kie-wb-common,ederign/kie-wb-common,romartin/kie-wb-common,manstis/kie-wb-common | package org.kie.workbench.common.screens.explorer.client.widgets.navigator;
import java.util.List;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.enterprise.context.Dependent;
import javax.inject.Inject;
import org.guvnor.structure.client.resources.NavigatorResources;
import org.jboss.errai.security.shared.api.identity.User;
import org.kie.workbench.common.screens.explorer.client.resources.i18n.ProjectExplorerConstants;
import org.kie.workbench.common.screens.explorer.client.widgets.ViewPresenter;
import org.kie.workbench.common.screens.explorer.model.FolderItem;
import org.kie.workbench.common.screens.explorer.model.FolderItemType;
import org.kie.workbench.common.screens.explorer.model.FolderListing;
import org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants;
import org.uberfire.backend.vfs.Path;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.ParameterizedCommand;
import org.uberfire.workbench.type.DotResourceTypeDefinition;
import com.github.gwtbootstrap.client.ui.Icon;
import com.github.gwtbootstrap.client.ui.Tooltip;
import com.github.gwtbootstrap.client.ui.constants.IconType;
import com.github.gwtbootstrap.client.ui.constants.Placement;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.Anchor;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.InlineHTML;
@Dependent
public class BreadcrumbNavigator extends Composite implements Navigator {
@Inject
private DotResourceTypeDefinition hiddenTypeDef;
@Inject
private User user;
private FolderListing activeContent;
private final FlowPanel container = new FlowPanel();
private final FlexTable navigator = new FlexTable() {{
setStyleName( NavigatorResources.INSTANCE.css().navigator() );
}};
private NavigatorOptions options = new NavigatorOptions();
private ViewPresenter presenter;
@PostConstruct
public void init() {
initWidget( container );
}
@Override
public void setOptions( final NavigatorOptions options ) {
this.options = options;
}
@Override
public void loadContent( final FolderListing content,
final Map<FolderItem, List<FolderItem>> siblings ) {
loadContent( content );
}
@Override
public void loadContent( final FolderListing content ) {
if ( content != null ) {
if ( content.equals( activeContent ) ) {
return;
}
activeContent = content;
container.clear();
navigator.removeAllRows();
setupBreadcrumb( content );
setupUpFolder( content );
setupContent( content );
container.add( navigator );
}
}
@Override
public void clear() {
}
@Override
public void setPresenter( final ViewPresenter presenter ) {
this.presenter = presenter;
}
private void setupBreadcrumb( final FolderListing content ) {
container.add( new NavigatorBreadcrumbs( NavigatorBreadcrumbs.Mode.SECOND_LEVEL ) {{
build( content.getSegments(), content.getItem(), new ParameterizedCommand<FolderItem>() {
@Override
public void execute( final FolderItem item ) {
presenter.activeFolderItemSelected( item );
}
} );
}} );
}
private void setupContent( final FolderListing content ) {
int base = navigator.getRowCount();
for ( int i = 0; i < content.getContent().size(); i++ ) {
final FolderItem folderItem = content.getContent().get( i );
if ( folderItem.getType().equals( FolderItemType.FOLDER ) && options.showDirectories() ) {
createDirectory( base + i, folderItem );
} else if ( folderItem.getType().equals( FolderItemType.FILE ) && options.showFiles() ) {
if ( !options.showHiddenFiles() && !hiddenTypeDef.accept( (Path) folderItem.getItem() ) ) {
createFile( base + i, folderItem );
} else if ( options.showHiddenFiles() ) {
createFile( base + i, folderItem );
}
}
}
}
private void setupUpFolder( final FolderListing content ) {
if ( options.allowUpLink() ) {
if ( content.getSegments().size() > 0 ) {
createUpFolder( content.getSegments().get( content.getSegments().size() - 1 ) );
}
}
}
private void createFile( final int row,
final FolderItem folderItem ) {
createElement( row, folderItem, IconType.FILE_ALT, NavigatorResources.INSTANCE.css().navigatoFileIcon(), new Command() {
@Override
public void execute() {
presenter.itemSelected( folderItem );
}
} );
}
private void createDirectory( final int row,
final FolderItem folderItem ) {
createElement( row, folderItem, IconType.FOLDER_CLOSE, NavigatorResources.INSTANCE.css().navigatorFolderIcon(), new Command() {
@Override
public void execute() {
presenter.activeFolderItemSelected( folderItem );
}
} );
}
private void createUpFolder( final FolderItem item ) {
int col = 0;
navigator.setText( 0, col, "" );
navigator.setText( 0, ++col, "" );
navigator.setWidget( 0, ++col, new Anchor( ".." ) {{
addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
presenter.activeFolderItemSelected( item );
}
} );
}} );
navigator.setText( 0, ++col, "" );
if ( options.showItemAge() ) {
navigator.setText( 0, ++col, "" );
}
if ( options.showItemMessage() ) {
navigator.setText( 0, ++col, "" );
}
}
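/**
* Renders a single navigator row: type icon, an optional lock indicator (highlighted when the
* current user owns the lock), the item name link and the copy/rename/delete (and, for folders,
* archive upload) action icons. Rename and delete are rendered as banned while the item is
* locked by another user.
*/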
private void createElement( final int row,
final FolderItem folderItem,
final IconType iconType,
final String style,
final Command onClick ) {
final boolean locked = (folderItem.getLockedBy() != null);
final boolean lockOwned = (locked && folderItem.getLockedBy().equals( user.getIdentifier() ));
int col = 0;
navigator.setWidget( row, col, new Icon( iconType ) {{
addStyleName( style );
}} );
col++;
if ( locked ) {
final InlineHTML lock = new InlineHTML( "<i class=\"icon-lock\"" + ((lockOwned) ? "style=\"color:#0083d0\"" : "") + "></i>" );
navigator.setWidget( row,
col,
lock );
new Tooltip() {
{
setWidget( lock );
setText( (lockOwned) ? ProjectExplorerConstants.INSTANCE.lockOwnedHint() :
ProjectExplorerConstants.INSTANCE.lockHint() + " " + folderItem.getLockedBy() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}
};
}
navigator.setWidget( row, ++col, new Anchor( folderItem.getFileName().replaceAll( " ", "\u00a0" ) ) {{
addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
onClick.execute();
}
} );
}} );
final FlowPanel iconContainer = new FlowPanel();
final InlineHTML copyContainer = new InlineHTML( "<i class=\"icon-copy\"></i>" );
copyContainer.addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
presenter.copyItem( folderItem );
}
} );
final InlineHTML renameContainer = new InlineHTML( getRenameIcon( locked && !lockOwned ) );
renameContainer.addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
if (!locked) {
presenter.renameItem( folderItem );
}
}
} );
renameContainer.getElement().getStyle().setPaddingLeft( 10, Style.Unit.PX );
final InlineHTML deleteContainer = new InlineHTML( getDeleteIcon( locked && !lockOwned ) );
deleteContainer.addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
if (!locked) {
presenter.deleteItem( folderItem );
}
}
} );
deleteContainer.getElement().getStyle().setPaddingLeft( 10, Style.Unit.PX );
iconContainer.add( copyContainer );
iconContainer.add( renameContainer );
iconContainer.add( deleteContainer );
if (folderItem.getType().equals(FolderItemType.FOLDER)) {
final InlineHTML archiveContainer = new InlineHTML("<i class=\"icon-archive\"></i>");
archiveContainer.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
presenter.uploadArchivedFolder( folderItem );
}
});
archiveContainer.getElement().getStyle().setPaddingLeft(10, Style.Unit.PX);
iconContainer.add(archiveContainer);
new Tooltip() {{
setWidget( archiveContainer );
setText( CommonConstants.INSTANCE.Archive() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}};
}
new Tooltip() {{
setWidget( copyContainer );
setText( CommonConstants.INSTANCE.Copy() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}};
new Tooltip() {{
setWidget( renameContainer );
setText( CommonConstants.INSTANCE.Rename() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}};
new Tooltip() {{
setWidget( deleteContainer );
setText( CommonConstants.INSTANCE.Delete() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}};
navigator.setWidget( row, ++col, iconContainer );
}
private String getRenameIcon( boolean banned ) {
String icon = "<i class=\"icon-font\"></i>";
return (banned) ? ban( icon ) : icon;
}
private String getDeleteIcon( boolean banned ) {
String icon = "<i class=\"icon-trash\"></i>";
return (banned) ? ban( icon ) : icon;
}
private String ban(String icon) {
return "<span class=\"icon-stack\">" + icon +
"<i class=\"icon-ban-circle icon-stack-base\"></i></span>";
}
} | kie-wb-common-screens/kie-wb-common-project-explorer/kie-wb-common-project-explorer-client/src/main/java/org/kie/workbench/common/screens/explorer/client/widgets/navigator/BreadcrumbNavigator.java | package org.kie.workbench.common.screens.explorer.client.widgets.navigator;
import java.util.List;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.enterprise.context.Dependent;
import javax.inject.Inject;
import com.github.gwtbootstrap.client.ui.Icon;
import com.github.gwtbootstrap.client.ui.Tooltip;
import com.github.gwtbootstrap.client.ui.constants.IconType;
import com.github.gwtbootstrap.client.ui.constants.Placement;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.Anchor;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.InlineHTML;
import org.guvnor.structure.client.resources.NavigatorResources;
import org.kie.workbench.common.screens.explorer.client.widgets.ViewPresenter;
import org.kie.workbench.common.screens.explorer.model.FolderItem;
import org.kie.workbench.common.screens.explorer.model.FolderItemType;
import org.kie.workbench.common.screens.explorer.model.FolderListing;
import org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants;
import org.uberfire.backend.vfs.Path;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.ParameterizedCommand;
import org.uberfire.workbench.type.DotResourceTypeDefinition;
@Dependent
public class BreadcrumbNavigator extends Composite implements Navigator {
@Inject
private DotResourceTypeDefinition hiddenTypeDef;
private FolderListing activeContent;
private final FlowPanel container = new FlowPanel();
private final FlexTable navigator = new FlexTable() {{
setStyleName( NavigatorResources.INSTANCE.css().navigator() );
}};
private NavigatorOptions options = new NavigatorOptions();
private ViewPresenter presenter;
@PostConstruct
public void init() {
initWidget( container );
}
@Override
public void setOptions( final NavigatorOptions options ) {
this.options = options;
}
@Override
public void loadContent( final FolderListing content,
final Map<FolderItem, List<FolderItem>> siblings ) {
loadContent( content );
}
@Override
public void loadContent( final FolderListing content ) {
if ( content != null ) {
if ( content.equals( activeContent ) ) {
return;
}
activeContent = content;
container.clear();
navigator.removeAllRows();
setupBreadcrumb( content );
setupUpFolder( content );
setupContent( content );
container.add( navigator );
}
}
@Override
public void clear() {
}
@Override
public void setPresenter( final ViewPresenter presenter ) {
this.presenter = presenter;
}
private void setupBreadcrumb( final FolderListing content ) {
container.add( new NavigatorBreadcrumbs( NavigatorBreadcrumbs.Mode.SECOND_LEVEL ) {{
build( content.getSegments(), content.getItem(), new ParameterizedCommand<FolderItem>() {
@Override
public void execute( final FolderItem item ) {
presenter.activeFolderItemSelected( item );
}
} );
}} );
}
private void setupContent( final FolderListing content ) {
int base = navigator.getRowCount();
for ( int i = 0; i < content.getContent().size(); i++ ) {
final FolderItem folderItem = content.getContent().get( i );
if ( folderItem.getType().equals( FolderItemType.FOLDER ) && options.showDirectories() ) {
createDirectory( base + i, folderItem );
} else if ( folderItem.getType().equals( FolderItemType.FILE ) && options.showFiles() ) {
if ( !options.showHiddenFiles() && !hiddenTypeDef.accept( (Path) folderItem.getItem() ) ) {
createFile( base + i, folderItem );
} else if ( options.showHiddenFiles() ) {
createFile( base + i, folderItem );
}
}
}
}
private void setupUpFolder( final FolderListing content ) {
if ( options.allowUpLink() ) {
if ( content.getSegments().size() > 0 ) {
createUpFolder( content.getSegments().get( content.getSegments().size() - 1 ) );
}
}
}
private void createFile( final int row,
final FolderItem folderItem ) {
createElement( row, folderItem, IconType.FILE_ALT, NavigatorResources.INSTANCE.css().navigatoFileIcon(), new Command() {
@Override
public void execute() {
presenter.itemSelected( folderItem );
}
} );
}
private void createDirectory( final int row,
final FolderItem folderItem ) {
createElement( row, folderItem, IconType.FOLDER_CLOSE, NavigatorResources.INSTANCE.css().navigatorFolderIcon(), new Command() {
@Override
public void execute() {
presenter.activeFolderItemSelected( folderItem );
}
} );
}
private void createUpFolder( final FolderItem item ) {
int col = 0;
navigator.setText( 0, col, "" );
navigator.setWidget( 0, ++col, new Anchor( ".." ) {{
addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
presenter.activeFolderItemSelected( item );
}
} );
}} );
navigator.setText( 0, ++col, "" );
if ( options.showItemAge() ) {
navigator.setText( 0, ++col, "" );
}
if ( options.showItemMessage() ) {
navigator.setText( 0, ++col, "" );
}
}
private void createElement( final int row,
final FolderItem folderItem,
final IconType iconType,
final String style,
final Command onClick ) {
int col = 0;
navigator.setWidget( row, col, new Icon( iconType ) {{
addStyleName( style );
}} );
navigator.setWidget( row, ++col, new Anchor( folderItem.getFileName().replaceAll( " ", "\u00a0" ) ) {{
addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
onClick.execute();
}
} );
}} );
final FlowPanel iconContainer = new FlowPanel();
final InlineHTML copyContainer = new InlineHTML( "<i class=\"icon-copy\"></i>" );
copyContainer.addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
presenter.copyItem( folderItem );
}
} );
final InlineHTML renameContainer = new InlineHTML( "<i class=\"icon-font\"></i>" );
renameContainer.addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
presenter.renameItem( folderItem );
}
} );
renameContainer.getElement().getStyle().setPaddingLeft( 10, Style.Unit.PX );
final InlineHTML deleteContainer = new InlineHTML( "<i class=\"icon-trash\"></i>" );
deleteContainer.addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
presenter.deleteItem( folderItem );
}
} );
deleteContainer.getElement().getStyle().setPaddingLeft( 10, Style.Unit.PX );
iconContainer.add( copyContainer );
iconContainer.add( renameContainer );
iconContainer.add( deleteContainer );
if (folderItem.getType().equals(FolderItemType.FOLDER)) {
final InlineHTML archiveContainer = new InlineHTML("<i class=\"icon-archive\"></i>");
archiveContainer.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
presenter.uploadArchivedFolder( folderItem );
}
});
archiveContainer.getElement().getStyle().setPaddingLeft(10, Style.Unit.PX);
iconContainer.add(archiveContainer);
new Tooltip() {{
setWidget( archiveContainer );
setText( CommonConstants.INSTANCE.Archive() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}};
}
new Tooltip() {{
setWidget( copyContainer );
setText( CommonConstants.INSTANCE.Copy() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}};
new Tooltip() {{
setWidget( renameContainer );
setText( CommonConstants.INSTANCE.Rename() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}};
new Tooltip() {{
setWidget( deleteContainer );
setText( CommonConstants.INSTANCE.Delete() );
setPlacement( Placement.TOP );
setShowDelay( 1000 );
reconfigure();
}};
navigator.setWidget( row, ++col, iconContainer );
}
} | BPMSPL-102: +Keep repository/links view in sync with lock state
| kie-wb-common-screens/kie-wb-common-project-explorer/kie-wb-common-project-explorer-client/src/main/java/org/kie/workbench/common/screens/explorer/client/widgets/navigator/BreadcrumbNavigator.java | BPMSPL-102: +Keep repository/links view in sync with lock state |
|
Java | apache-2.0 | c4f788d5a140bcc14bf9782f4021c0af4712f075 | 0 | prasannapramod/apex-malhar,yogidevendra/apex-malhar,ananthc/apex-malhar,tweise/incubator-apex-malhar,tweise/apex-malhar,vrozov/incubator-apex-malhar,siyuanh/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,ananthc/apex-malhar,siyuanh/apex-malhar,apache/incubator-apex-malhar,patilvikram/apex-malhar,tweise/apex-malhar,siyuanh/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,prasannapramod/apex-malhar,trusli/apex-malhar,apache/incubator-apex-malhar,davidyan74/apex-malhar,vrozov/incubator-apex-malhar,yogidevendra/apex-malhar,vrozov/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,prasannapramod/apex-malhar,trusli/apex-malhar,vrozov/incubator-apex-malhar,tweise/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,apache/incubator-apex-malhar,yogidevendra/apex-malhar,yogidevendra/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,brightchen/apex-malhar,patilvikram/apex-malhar,tweise/incubator-apex-malhar,patilvikram/apex-malhar,tweise/apex-malhar,siyuanh/apex-malhar,prasannapramod/apex-malhar,yogidevendra/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,davidyan74/apex-malhar,vrozov/incubator-apex-malhar,vrozov/incubator-apex-malhar,siyuanh/apex-malhar,yogidevendra/incubator-apex-malhar,vrozov/apex-malhar,tweise/incubator-apex-malhar,tweise/incubator-apex-malhar,ananthc/apex-malhar,brightchen/apex-malhar,siyuanh/apex-malhar,siyuanh/apex-malhar,tushargosavi/incubator-apex-malhar,trusli/apex-malhar,tushargosavi/incubator-apex-malhar,yogidevendra/apex-malhar,yogidevendra/apex-malhar,davidyan74/apex-malhar,patilvikram/apex-malhar,DataTorrent/incubator-apex-malhar,vrozov/apex-malhar,trusli/apex-malhar,DataTorrent/incubator-apex-malhar,siyuanh/incubator-apex-malhar,trusli/apex-malhar,vrozov/apex-malhar,ananthc/apex-malhar,apache/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,vrozov/apex-malhar,davidyan74/apex-malhar,davidyan74/apex-malhar,siyuanh/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,siyuanh/apex-malhar,patilvikram/apex-malhar,apache/incubator-apex-malhar,siyuanh/incubator-apex-malhar,prasannapramod/apex-malhar,yogidevendra/incubator-apex-malhar,tweise/incubator-apex-malhar,apache/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,brightchen/apex-malhar,siyuanh/apex-malhar,ananthc/apex-malhar,apache/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,tweise/apex-malhar,brightchen/apex-malhar,yogidevendra/apex-malhar,vrozov/incubator-apex-malhar,patilvikram/apex-malhar,vrozov/apex-malhar,ananthc/apex-malhar,siyuanh/incubator-apex-malhar,tweise/apex-malhar,tushargosavi/incubator-apex-malhar,trusli/apex-malhar,patilvikram/apex-malhar,brightchen/apex-malhar,tweise/apex-malhar,vrozov/apex-malhar,yogidevendra/incubator-apex-malhar,brightchen/apex-malhar,davidyan74/apex-malhar,tweise/incubator-apex-malhar,brightchen/apex-malhar,trusli/apex-malhar,siyuanh/incubator-apex-malhar,prasannapramod/apex-malhar | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.lib.io.fs;
import java.io.IOException;
import java.net.URI;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import javax.validation.Valid;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.apex.malhar.lib.wal.WindowDataManager;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.datatorrent.api.Component;
import com.datatorrent.api.Context;
import com.datatorrent.api.InputOperator;
import com.datatorrent.api.Operator;
import com.datatorrent.api.annotation.OperatorAnnotation;
import com.datatorrent.api.annotation.Stateless;
/**
* Input operator that scans a directory for files and splits a file into blocks.<br/>
* The operator emits block metadata and file metadata.<br/>
*
* The file system/directory space should be different for different partitions of file splitter.
* The scanning of the configured directories is done periodically, on a separate thread, by the {@link TimeBasedDirectoryScanner}.
*
* @displayName File Splitter
* @category Input
* @tags file
* @since 2.0.0
*/
@OperatorAnnotation(checkpointableWithinAppWindow = false)
public class FileSplitterInput extends AbstractFileSplitter implements InputOperator, Operator.CheckpointListener, Operator.CheckpointNotificationListener
{
@NotNull
private WindowDataManager windowDataManager;
protected transient LinkedList<ScannedFileInfo> currentWindowRecoveryState;
@Valid
@NotNull
private TimeBasedDirectoryScanner scanner;
private Map<String, Map<String, Long>> referenceTimes;
public FileSplitterInput()
{
super();
windowDataManager = new WindowDataManager.NoopWindowDataManager();
scanner = new TimeBasedDirectoryScanner();
}
@Override
public void setup(Context.OperatorContext context)
{
currentWindowRecoveryState = Lists.newLinkedList();
if (referenceTimes == null) {
referenceTimes = new ConcurrentHashMap<>();
}
scanner.setup(context);
windowDataManager.setup(context);
super.setup(context);
long largestRecoveryWindow = windowDataManager.getLargestCompletedWindow();
if (largestRecoveryWindow == Stateless.WINDOW_ID || context.getValue(Context.OperatorContext.ACTIVATION_WINDOW_ID) >
largestRecoveryWindow) {
scanner.startScanning(Collections.unmodifiableMap(referenceTimes));
}
}
@Override
public void beginWindow(long windowId)
{
super.beginWindow(windowId);
if (windowId <= windowDataManager.getLargestCompletedWindow()) {
replay(windowId);
}
}
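/**
* Re-emits the file and block metadata recorded for an already completed window so that,
* after recovery, downstream operators receive the same tuples for that window.
*/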
protected void replay(long windowId)
{
try {
@SuppressWarnings("unchecked")
LinkedList<ScannedFileInfo> recoveredData = (LinkedList<ScannedFileInfo>)windowDataManager.retrieve(windowId);
if (recoveredData == null) {
//This could happen when there are multiple physical instances and one of them is ahead in processing windows.
return;
}
if (blockMetadataIterator != null) {
emitBlockMetadata();
}
for (ScannedFileInfo info : recoveredData) {
updateReferenceTimes(info);
FileMetadata fileMetadata = buildFileMetadata(info);
filesMetadataOutput.emit(fileMetadata);
blockMetadataIterator = new BlockMetadataIterator(this, fileMetadata, blockSize);
if (!emitBlockMetadata()) {
break;
}
}
} catch (IOException e) {
throw new RuntimeException("replay", e);
}
if (windowId == windowDataManager.getLargestCompletedWindow()) {
scanner.startScanning(Collections.unmodifiableMap(referenceTimes));
}
}
@Override
public void emitTuples()
{
if (currentWindowId <= windowDataManager.getLargestCompletedWindow()) {
return;
}
Throwable throwable;
if ((throwable = scanner.atomicThrowable.get()) != null) {
Throwables.propagate(throwable);
}
process();
}
@Override
protected FileInfo getFileInfo()
{
return scanner.pollFile();
}
@Override
protected boolean processFileInfo(FileInfo fileInfo)
{
ScannedFileInfo scannedFileInfo = (ScannedFileInfo)fileInfo;
currentWindowRecoveryState.add(scannedFileInfo);
updateReferenceTimes(scannedFileInfo);
return super.processFileInfo(fileInfo);
}
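/**
* Caches the latest observed modification time of a file, keyed by its input directory
* (or by the file path itself when no directory was configured), so that unchanged files
* can be skipped in subsequent scans.
*/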
protected void updateReferenceTimes(ScannedFileInfo fileInfo)
{
Map<String, Long> referenceTimePerInputDir;
String referenceKey = fileInfo.getDirectoryPath() == null ? fileInfo.getFilePath() : fileInfo.getDirectoryPath();
if ((referenceTimePerInputDir = referenceTimes.get(referenceKey)) == null) {
referenceTimePerInputDir = new ConcurrentHashMap<>();
}
referenceTimePerInputDir.put(fileInfo.getFilePath(), fileInfo.modifiedTime);
referenceTimes.put(referenceKey, referenceTimePerInputDir);
}
@Override
public void endWindow()
{
if (currentWindowId > windowDataManager.getLargestCompletedWindow()) {
try {
windowDataManager.save(currentWindowRecoveryState, currentWindowId);
} catch (IOException e) {
throw new RuntimeException("saving recovery", e);
}
}
currentWindowRecoveryState.clear();
}
@Override
protected long getDefaultBlockSize()
{
return scanner.fs.getDefaultBlockSize(new Path(scanner.files.iterator().next()));
}
@Override
protected FileStatus getFileStatus(Path path) throws IOException
{
return scanner.fs.getFileStatus(path);
}
@Override
public void beforeCheckpoint(long l)
{
}
@Override
public void checkpointed(long l)
{
}
@Override
public void committed(long l)
{
try {
windowDataManager.committed(l);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public void teardown()
{
scanner.teardown();
}
public void setWindowDataManager(WindowDataManager windowDataManager)
{
this.windowDataManager = windowDataManager;
}
public WindowDataManager getWindowDataManager()
{
return this.windowDataManager;
}
public void setScanner(TimeBasedDirectoryScanner scanner)
{
this.scanner = scanner;
}
public TimeBasedDirectoryScanner getScanner()
{
return this.scanner;
}
public static class TimeBasedDirectoryScanner implements Runnable, Component<Context.OperatorContext>
{
private static final long DEF_SCAN_INTERVAL_MILLIS = 5000;
private static final String FILE_BEING_COPIED = "_COPYING_";
private boolean recursive = true;
private transient volatile boolean trigger;
@NotNull
@Size(min = 1)
private final Set<String> files = new LinkedHashSet<>();
@Min(0)
private long scanIntervalMillis = DEF_SCAN_INTERVAL_MILLIS;
private String filePatternRegularExp;
private String ignoreFilePatternRegularExp;
protected transient long lastScanMillis;
protected transient FileSystem fs;
protected transient LinkedBlockingDeque<ScannedFileInfo> discoveredFiles;
protected transient ExecutorService scanService;
protected transient AtomicReference<Throwable> atomicThrowable;
private transient volatile boolean running;
protected transient HashSet<String> ignoredFiles;
protected transient Pattern regex;
private transient Pattern ignoreRegex;
protected transient long sleepMillis;
protected transient Map<String, Map<String, Long>> referenceTimes;
private transient ScannedFileInfo lastScannedInfo;
private transient int numDiscoveredPerIteration;
@Override
public void setup(Context.OperatorContext context)
{
scanService = Executors.newSingleThreadExecutor();
discoveredFiles = new LinkedBlockingDeque<>();
atomicThrowable = new AtomicReference<>();
ignoredFiles = Sets.newHashSet();
sleepMillis = context.getValue(Context.OperatorContext.SPIN_MILLIS);
if (filePatternRegularExp != null) {
regex = Pattern.compile(filePatternRegularExp);
}
if (ignoreFilePatternRegularExp != null) {
ignoreRegex = Pattern.compile(this.ignoreFilePatternRegularExp);
}
try {
fs = getFSInstance();
} catch (IOException e) {
throw new RuntimeException("opening fs", e);
}
}
protected void startScanning(Map<String, Map<String, Long>> referenceTimes)
{
this.referenceTimes = Preconditions.checkNotNull(referenceTimes);
scanService.submit(this);
}
/**
* Stop scanner
*/
protected void stopScanning()
{
running = false;
}
@Override
public void teardown()
{
stopScanning();
scanService.shutdownNow();
try {
fs.close();
} catch (IOException e) {
throw new RuntimeException("closing fs", e);
}
}
protected FileSystem getFSInstance() throws IOException
{
return FileSystem.newInstance(new Path(files.iterator().next()).toUri(), new Configuration());
}
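/**
* Scanner loop: whenever the scan interval elapses (or a trigger is set) and the previous
* iteration has been consumed, each configured path is walked and files whose modification
* time has advanced past the cached reference time are queued for the operator thread.
*/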
@Override
public void run()
{
running = true;
try {
while (running) {
if ((trigger || (System.currentTimeMillis() - scanIntervalMillis >= lastScanMillis)) && isIterationCompleted()) {
trigger = false;
lastScannedInfo = null;
numDiscoveredPerIteration = 0;
for (String afile : files) {
Path filePath = new Path(afile);
LOG.debug("Scan started for input {}", filePath);
Map<String, Long> lastModifiedTimesForInputDir = null;
if (fs.exists(filePath)) {
FileStatus fileStatus = fs.getFileStatus(filePath);
lastModifiedTimesForInputDir = referenceTimes.get(fileStatus.getPath().toUri().getPath());
}
scan(filePath, null, lastModifiedTimesForInputDir);
}
scanIterationComplete();
} else {
Thread.sleep(sleepMillis);
}
}
} catch (Throwable throwable) {
LOG.error("service", throwable);
running = false;
atomicThrowable.set(throwable);
Throwables.propagate(throwable);
}
}
//check if scanned files of last iteration are processed by operator thread
private boolean isIterationCompleted()
{
if (lastScannedInfo == null) { // first iteration started
return true;
}
LOG.debug("Directory path: {} Sub-Directory or File path: {}", lastScannedInfo.getDirectoryPath(), lastScannedInfo.getFilePath());
/*
* As referenceTimes is now concurrentHashMap, it throws exception if key passed is null.
* So in case where the last scanned directory is null which likely possible when
* only file name is specified instead of directory path.
*/
if (lastScannedInfo.getDirectoryPath() == null) {
return true;
}
Map<String, Long> referenceTime = referenceTimes.get(lastScannedInfo.getDirectoryPath());
if (referenceTime != null) {
return referenceTime.get(lastScannedInfo.getFilePath()) != null;
}
return false;
}
/**
* Operations that need to be done once a scan is complete.
*/
protected void scanIterationComplete()
{
LOG.debug("scan complete {} {}", lastScanMillis, numDiscoveredPerIteration);
lastScanMillis = System.currentTimeMillis();
}
/**
* This is not used anywhere and should be removed. however, currently it breaks backward compatibility, so
* just deprecating it.
*/
@Deprecated
protected void scan(@NotNull Path filePath, Path rootPath) throws IOException
{
Map<String, Long> lastModifiedTimesForInputDir;
lastModifiedTimesForInputDir = referenceTimes.get(filePath.toUri().getPath());
scan(filePath, rootPath, lastModifiedTimesForInputDir);
}
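/**
* Recursively walks {@code filePath}, emitting a ScannedFileInfo for empty input directories,
* for sub-directories (when recursive scanning is enabled) and for files accepted by the
* configured patterns; rejected files are remembered in {@code ignoredFiles} so they are not
* re-examined.
*/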
private void scan(Path filePath, Path rootPath, Map<String, Long> lastModifiedTimesForInputDir) throws IOException
{
FileStatus parentStatus = fs.getFileStatus(filePath);
String parentPathStr = filePath.toUri().getPath();
LOG.debug("scan {}", parentPathStr);
FileStatus[] childStatuses = fs.listStatus(filePath);
if (childStatuses.length == 0 && rootPath == null && (lastModifiedTimesForInputDir == null || lastModifiedTimesForInputDir.get(parentPathStr) == null)) { // empty input directory copy as is
ScannedFileInfo info = new ScannedFileInfo(null, filePath.toString(), parentStatus.getModificationTime());
processDiscoveredFile(info);
}
for (FileStatus childStatus : childStatuses) {
Path childPath = childStatus.getPath();
String childPathStr = childPath.toUri().getPath();
if (childStatus.isDirectory() && isRecursive()) {
addToDiscoveredFiles(rootPath, parentStatus, childStatus, lastModifiedTimesForInputDir);
scan(childPath, rootPath == null ? parentStatus.getPath() : rootPath, lastModifiedTimesForInputDir);
} else if (acceptFile(childPathStr)) {
addToDiscoveredFiles(rootPath, parentStatus, childStatus, lastModifiedTimesForInputDir);
} else {
// don't look at it again
ignoredFiles.add(childPathStr);
}
}
}
private void addToDiscoveredFiles(Path rootPath, FileStatus parentStatus, FileStatus childStatus,
Map<String, Long> lastModifiedTimesForInputDir) throws IOException
{
Path childPath = childStatus.getPath();
String childPathStr = childPath.toUri().getPath();
// Directory by now is scanned forcibly. Now check for whether file/directory needs to be added to discoveredFiles.
Long oldModificationTime = null;
if (lastModifiedTimesForInputDir != null) {
oldModificationTime = lastModifiedTimesForInputDir.get(childPathStr);
}
if (skipFile(childPath, childStatus.getModificationTime(), oldModificationTime) || // Skip dir or file if no timestamp modification
(childStatus.isDirectory() && (oldModificationTime != null))) { // If timestamp modified but if its a directory and already present in map, then skip.
return;
}
if (ignoredFiles.contains(childPathStr)) {
return;
}
ScannedFileInfo info = createScannedFileInfo(parentStatus.getPath(), parentStatus, childPath, childStatus,
rootPath);
processDiscoveredFile(info);
}
protected void processDiscoveredFile(ScannedFileInfo info)
{
numDiscoveredPerIteration++;
lastScannedInfo = info;
discoveredFiles.add(info);
LOG.debug("discovered {} {}", info.getFilePath(), info.modifiedTime);
}
protected ScannedFileInfo createScannedFileInfo(Path parentPath, FileStatus parentStatus, Path childPath,
FileStatus childStatus, Path rootPath)
{
ScannedFileInfo info;
if (rootPath == null) {
info = parentStatus.isDirectory() ?
new ScannedFileInfo(parentPath.toUri().getPath(), childPath.getName(), childStatus.getModificationTime()) :
new ScannedFileInfo(null, childPath.toUri().getPath(), childStatus.getModificationTime());
} else {
URI relativeChildURI = rootPath.toUri().relativize(childPath.toUri());
info = new ScannedFileInfo(rootPath.toUri().getPath(), relativeChildURI.getPath(),
childStatus.getModificationTime());
}
return info;
}
/**
* Skips file/directory based on their modification time.<br/>
*
* @param path file path
* @param modificationTime modification time
* @param lastModificationTime last cached directory modification time
* @return true to skip; false otherwise.
* @throws IOException
*/
protected static boolean skipFile(@SuppressWarnings("unused") @NotNull Path path, @NotNull Long modificationTime,
Long lastModificationTime) throws IOException
{
return (!(lastModificationTime == null || modificationTime > lastModificationTime));
}
/**
* Accepts file which match a regular pattern.
*
* @param filePathStr file path
* @return true if the path matches the pattern; false otherwise;
*/
protected boolean acceptFile(String filePathStr)
{
if (fs.getScheme().equalsIgnoreCase("hdfs") && filePathStr.endsWith(FILE_BEING_COPIED)) {
return false;
}
if (regex != null) {
Matcher matcher = regex.matcher(filePathStr);
if (!matcher.matches()) {
return false;
}
}
if (ignoreRegex != null) {
Matcher matcher = ignoreRegex.matcher(filePathStr);
if (matcher.matches()) {
return false;
}
}
return true;
}
public FileInfo pollFile()
{
return discoveredFiles.poll();
}
protected int getNumDiscoveredPerIteration()
{
return numDiscoveredPerIteration;
}
/**
* Gets the regular expression for file names to split.
*
* @return regular expression
*/
public String getFilePatternRegularExp()
{
return filePatternRegularExp;
}
/**
* Only files with names matching the given java regular expression are split.
*
* @param filePatternRegexp regular expression
*/
public void setFilePatternRegularExp(String filePatternRegexp)
{
this.filePatternRegularExp = filePatternRegexp;
}
/**
* @return the regular expression for ignored files.
*/
public String getIgnoreFilePatternRegularExp()
{
return ignoreFilePatternRegularExp;
}
/**
* Sets the regular expression for files that should be ignored.
*
* @param ignoreFilePatternRegex regular expression for files that will be ignored.
*/
public void setIgnoreFilePatternRegularExp(String ignoreFilePatternRegex)
{
this.ignoreFilePatternRegularExp = ignoreFilePatternRegex;
}
/**
* A comma separated list of directories to scan. If the path is not fully qualified the default
* file system is used. A fully qualified path can be provided to scan directories in other filesystems.
*
* @param files files
*/
public void setFiles(String files)
{
Iterables.addAll(this.files, Splitter.on(",").omitEmptyStrings().split(files));
}
/**
* Gets the files to be scanned.
*
* @return files to be scanned.
*/
public String getFiles()
{
return Joiner.on(",").join(this.files);
}
/**
* True if recursive; false otherwise.
*
* @param recursive true if recursive; false otherwise.
*/
public void setRecursive(boolean recursive)
{
this.recursive = recursive;
}
/**
* Sets whether scan will be recursive.
*
* @return true if recursive; false otherwise.
*/
public boolean isRecursive()
{
return this.recursive;
}
/**
* Sets the trigger which will initiate scan.
*
* @param trigger
*/
public void setTrigger(boolean trigger)
{
this.trigger = trigger;
}
/**
* The trigger which will initiate scan.
*
* @return trigger
*/
public boolean isTrigger()
{
return this.trigger;
}
/**
* Returns the frequency with which new files are scanned for in milliseconds.
*
* @return The scan interval in milliseconds.
*/
public long getScanIntervalMillis()
{
return scanIntervalMillis;
}
/**
* Sets the frequency with which new files are scanned for in milliseconds.
*
* @param scanIntervalMillis The scan interval in milliseconds.
*/
public void setScanIntervalMillis(long scanIntervalMillis)
{
this.scanIntervalMillis = scanIntervalMillis;
}
}
/**
* File info created for files discovered by scanner
*/
public static class ScannedFileInfo extends AbstractFileSplitter.FileInfo
{
protected final long modifiedTime;
protected ScannedFileInfo()
{
super();
modifiedTime = -1;
}
public ScannedFileInfo(@Nullable String directoryPath, @NotNull String relativeFilePath, long modifiedTime)
{
super(directoryPath, relativeFilePath);
this.modifiedTime = modifiedTime;
}
public long getModifiedTime()
{
return modifiedTime;
}
}
private static final Logger LOG = LoggerFactory.getLogger(FileSplitterInput.class);
}
| library/src/main/java/com/datatorrent/lib/io/fs/FileSplitterInput.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.lib.io.fs;
import java.io.IOException;
import java.net.URI;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import javax.validation.Valid;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.apex.malhar.lib.wal.WindowDataManager;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.datatorrent.api.Component;
import com.datatorrent.api.Context;
import com.datatorrent.api.InputOperator;
import com.datatorrent.api.Operator;
import com.datatorrent.api.annotation.OperatorAnnotation;
import com.datatorrent.api.annotation.Stateless;
/**
* Input operator that scans a directory for files and splits a file into blocks.<br/>
* The operator emits block metadata and file metadata.<br/>
*
* The file system/directory space should be different for different partitions of file splitter.
* The scanning of
*
* @displayName File Splitter
* @category Input
* @tags file
* @since 2.0.0
*/
@OperatorAnnotation(checkpointableWithinAppWindow = false)
public class FileSplitterInput extends AbstractFileSplitter implements InputOperator, Operator.CheckpointListener, Operator.CheckpointNotificationListener
{
@NotNull
private WindowDataManager windowDataManager;
protected transient LinkedList<ScannedFileInfo> currentWindowRecoveryState;
@Valid
@NotNull
private TimeBasedDirectoryScanner scanner;
private Map<String, Map<String, Long>> referenceTimes;
public FileSplitterInput()
{
super();
windowDataManager = new WindowDataManager.NoopWindowDataManager();
scanner = new TimeBasedDirectoryScanner();
}
@Override
public void setup(Context.OperatorContext context)
{
currentWindowRecoveryState = Lists.newLinkedList();
if (referenceTimes == null) {
referenceTimes = new ConcurrentHashMap<>();
}
scanner.setup(context);
windowDataManager.setup(context);
super.setup(context);
long largestRecoveryWindow = windowDataManager.getLargestCompletedWindow();
if (largestRecoveryWindow == Stateless.WINDOW_ID || context.getValue(Context.OperatorContext.ACTIVATION_WINDOW_ID) >
largestRecoveryWindow) {
scanner.startScanning(Collections.unmodifiableMap(referenceTimes));
}
}
@Override
public void beginWindow(long windowId)
{
super.beginWindow(windowId);
if (windowId <= windowDataManager.getLargestCompletedWindow()) {
replay(windowId);
}
}
protected void replay(long windowId)
{
try {
@SuppressWarnings("unchecked")
LinkedList<ScannedFileInfo> recoveredData = (LinkedList<ScannedFileInfo>)windowDataManager.retrieve(windowId);
if (recoveredData == null) {
//This could happen when there are multiple physical instances and one of them is ahead in processing windows.
return;
}
if (blockMetadataIterator != null) {
emitBlockMetadata();
}
for (ScannedFileInfo info : recoveredData) {
updateReferenceTimes(info);
FileMetadata fileMetadata = buildFileMetadata(info);
filesMetadataOutput.emit(fileMetadata);
blockMetadataIterator = new BlockMetadataIterator(this, fileMetadata, blockSize);
if (!emitBlockMetadata()) {
break;
}
}
} catch (IOException e) {
throw new RuntimeException("replay", e);
}
if (windowId == windowDataManager.getLargestCompletedWindow()) {
scanner.startScanning(Collections.unmodifiableMap(referenceTimes));
}
}
@Override
public void emitTuples()
{
if (currentWindowId <= windowDataManager.getLargestCompletedWindow()) {
return;
}
Throwable throwable;
if ((throwable = scanner.atomicThrowable.get()) != null) {
Throwables.propagate(throwable);
}
process();
}
@Override
protected FileInfo getFileInfo()
{
return scanner.pollFile();
}
@Override
protected boolean processFileInfo(FileInfo fileInfo)
{
ScannedFileInfo scannedFileInfo = (ScannedFileInfo)fileInfo;
currentWindowRecoveryState.add(scannedFileInfo);
updateReferenceTimes(scannedFileInfo);
return super.processFileInfo(fileInfo);
}
protected void updateReferenceTimes(ScannedFileInfo fileInfo)
{
Map<String, Long> referenceTimePerInputDir;
String referenceKey = fileInfo.getDirectoryPath() == null ? fileInfo.getFilePath() : fileInfo.getDirectoryPath();
if ((referenceTimePerInputDir = referenceTimes.get(referenceKey)) == null) {
referenceTimePerInputDir = new ConcurrentHashMap<>();
}
referenceTimePerInputDir.put(fileInfo.getFilePath(), fileInfo.modifiedTime);
referenceTimes.put(referenceKey, referenceTimePerInputDir);
}
@Override
public void endWindow()
{
if (currentWindowId > windowDataManager.getLargestCompletedWindow()) {
try {
windowDataManager.save(currentWindowRecoveryState, currentWindowId);
} catch (IOException e) {
throw new RuntimeException("saving recovery", e);
}
}
currentWindowRecoveryState.clear();
}
@Override
protected long getDefaultBlockSize()
{
return scanner.fs.getDefaultBlockSize(new Path(scanner.files.iterator().next()));
}
@Override
protected FileStatus getFileStatus(Path path) throws IOException
{
return scanner.fs.getFileStatus(path);
}
@Override
public void beforeCheckpoint(long l)
{
}
@Override
public void checkpointed(long l)
{
}
@Override
public void committed(long l)
{
try {
windowDataManager.committed(l);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public void teardown()
{
scanner.teardown();
}
public void setWindowDataManager(WindowDataManager windowDataManager)
{
this.windowDataManager = windowDataManager;
}
public WindowDataManager getWindowDataManager()
{
return this.windowDataManager;
}
public void setScanner(TimeBasedDirectoryScanner scanner)
{
this.scanner = scanner;
}
public TimeBasedDirectoryScanner getScanner()
{
return this.scanner;
}
public static class TimeBasedDirectoryScanner implements Runnable, Component<Context.OperatorContext>
{
private static long DEF_SCAN_INTERVAL_MILLIS = 5000;
private static String FILE_BEING_COPIED = "_COPYING_";
private boolean recursive = true;
private transient volatile boolean trigger;
@NotNull
@Size(min = 1)
private final Set<String> files = new LinkedHashSet<>();
@Min(0)
private long scanIntervalMillis = DEF_SCAN_INTERVAL_MILLIS;
private String filePatternRegularExp;
private String ignoreFilePatternRegularExp;
protected transient long lastScanMillis;
protected transient FileSystem fs;
protected transient LinkedBlockingDeque<ScannedFileInfo> discoveredFiles;
protected transient ExecutorService scanService;
protected transient AtomicReference<Throwable> atomicThrowable;
private transient volatile boolean running;
protected transient HashSet<String> ignoredFiles;
protected transient Pattern regex;
private transient Pattern ignoreRegex;
protected transient long sleepMillis;
protected transient Map<String, Map<String, Long>> referenceTimes;
private transient ScannedFileInfo lastScannedInfo;
private transient int numDiscoveredPerIteration;
@Override
public void setup(Context.OperatorContext context)
{
scanService = Executors.newSingleThreadExecutor();
discoveredFiles = new LinkedBlockingDeque<>();
atomicThrowable = new AtomicReference<>();
ignoredFiles = Sets.newHashSet();
sleepMillis = context.getValue(Context.OperatorContext.SPIN_MILLIS);
if (filePatternRegularExp != null) {
regex = Pattern.compile(filePatternRegularExp);
}
if (ignoreFilePatternRegularExp != null) {
ignoreRegex = Pattern.compile(this.ignoreFilePatternRegularExp);
}
try {
fs = getFSInstance();
} catch (IOException e) {
throw new RuntimeException("opening fs", e);
}
}
protected void startScanning(Map<String, Map<String, Long>> referenceTimes)
{
this.referenceTimes = Preconditions.checkNotNull(referenceTimes);
scanService.submit(this);
}
/**
* Stop scanner
*/
protected void stopScanning()
{
running = false;
}
@Override
public void teardown()
{
stopScanning();
scanService.shutdownNow();
try {
fs.close();
} catch (IOException e) {
throw new RuntimeException("closing fs", e);
}
}
protected FileSystem getFSInstance() throws IOException
{
return FileSystem.newInstance(new Path(files.iterator().next()).toUri(), new Configuration());
}
@Override
public void run()
{
running = true;
try {
while (running) {
if ((trigger || (System.currentTimeMillis() - scanIntervalMillis >= lastScanMillis)) && isIterationCompleted()) {
trigger = false;
lastScannedInfo = null;
numDiscoveredPerIteration = 0;
for (String afile : files) {
Path filePath = new Path(afile);
LOG.debug("Scan started for input {}", filePath);
Map<String, Long> lastModifiedTimesForInputDir = null;
if (fs.exists(filePath)) {
FileStatus fileStatus = fs.getFileStatus(filePath);
lastModifiedTimesForInputDir = referenceTimes.get(fileStatus.getPath().toUri().getPath());
}
scan(filePath, null, lastModifiedTimesForInputDir);
}
scanIterationComplete();
} else {
Thread.sleep(sleepMillis);
}
}
} catch (Throwable throwable) {
LOG.error("service", throwable);
running = false;
atomicThrowable.set(throwable);
Throwables.propagate(throwable);
}
}
//check if scanned files of last iteration are processed by operator thread
private boolean isIterationCompleted()
{
if (lastScannedInfo == null) { // first iteration started
return true;
}
Map<String, Long> referenceTime = referenceTimes.get(lastScannedInfo.getDirectoryPath());
if (referenceTime != null) {
return referenceTime.get(lastScannedInfo.getFilePath()) != null;
}
return false;
}
/**
* Operations that need to be done once a scan is complete.
*/
protected void scanIterationComplete()
{
LOG.debug("scan complete {} {}", lastScanMillis, numDiscoveredPerIteration);
lastScanMillis = System.currentTimeMillis();
}
/**
* This is not used anywhere and should be removed. however, currently it breaks backward compatibility, so
* just deprecating it.
*/
@Deprecated
protected void scan(@NotNull Path filePath, Path rootPath) throws IOException
{
Map<String, Long> lastModifiedTimesForInputDir;
lastModifiedTimesForInputDir = referenceTimes.get(filePath.toUri().getPath());
scan(filePath, rootPath, lastModifiedTimesForInputDir);
}
private void scan(Path filePath, Path rootPath, Map<String, Long> lastModifiedTimesForInputDir) throws IOException
{
FileStatus parentStatus = fs.getFileStatus(filePath);
String parentPathStr = filePath.toUri().getPath();
LOG.debug("scan {}", parentPathStr);
FileStatus[] childStatuses = fs.listStatus(filePath);
if (childStatuses.length == 0 && rootPath == null && (lastModifiedTimesForInputDir == null || lastModifiedTimesForInputDir.get(parentPathStr) == null)) { // empty input directory copy as is
ScannedFileInfo info = new ScannedFileInfo(null, filePath.toString(), parentStatus.getModificationTime());
processDiscoveredFile(info);
}
for (FileStatus childStatus : childStatuses) {
Path childPath = childStatus.getPath();
String childPathStr = childPath.toUri().getPath();
if (childStatus.isDirectory() && isRecursive()) {
addToDiscoveredFiles(rootPath, parentStatus, childStatus, lastModifiedTimesForInputDir);
scan(childPath, rootPath == null ? parentStatus.getPath() : rootPath, lastModifiedTimesForInputDir);
} else if (acceptFile(childPathStr)) {
addToDiscoveredFiles(rootPath, parentStatus, childStatus, lastModifiedTimesForInputDir);
} else {
// don't look at it again
ignoredFiles.add(childPathStr);
}
}
}
private void addToDiscoveredFiles(Path rootPath, FileStatus parentStatus, FileStatus childStatus,
Map<String, Long> lastModifiedTimesForInputDir) throws IOException
{
Path childPath = childStatus.getPath();
String childPathStr = childPath.toUri().getPath();
// Directory by now is scanned forcibly. Now check for whether file/directory needs to be added to discoveredFiles.
Long oldModificationTime = null;
if (lastModifiedTimesForInputDir != null) {
oldModificationTime = lastModifiedTimesForInputDir.get(childPathStr);
}
if (skipFile(childPath, childStatus.getModificationTime(), oldModificationTime) || // Skip dir or file if no timestamp modification
(childStatus.isDirectory() && (oldModificationTime != null))) { // If timestamp modified but if its a directory and already present in map, then skip.
return;
}
if (ignoredFiles.contains(childPathStr)) {
return;
}
ScannedFileInfo info = createScannedFileInfo(parentStatus.getPath(), parentStatus, childPath, childStatus,
rootPath);
processDiscoveredFile(info);
}
protected void processDiscoveredFile(ScannedFileInfo info)
{
numDiscoveredPerIteration++;
lastScannedInfo = info;
discoveredFiles.add(info);
LOG.debug("discovered {} {}", info.getFilePath(), info.modifiedTime);
}
protected ScannedFileInfo createScannedFileInfo(Path parentPath, FileStatus parentStatus, Path childPath,
FileStatus childStatus, Path rootPath)
{
ScannedFileInfo info;
if (rootPath == null) {
info = parentStatus.isDirectory() ?
new ScannedFileInfo(parentPath.toUri().getPath(), childPath.getName(), childStatus.getModificationTime()) :
new ScannedFileInfo(null, childPath.toUri().getPath(), childStatus.getModificationTime());
} else {
URI relativeChildURI = rootPath.toUri().relativize(childPath.toUri());
info = new ScannedFileInfo(rootPath.toUri().getPath(), relativeChildURI.getPath(),
childStatus.getModificationTime());
}
return info;
}
/**
* Skips file/directory based on their modification time.<br/>
*
* @param path file path
* @param modificationTime modification time
* @param lastModificationTime last cached directory modification time
* @return true to skip; false otherwise.
* @throws IOException
*/
protected static boolean skipFile(@SuppressWarnings("unused") @NotNull Path path, @NotNull Long modificationTime,
Long lastModificationTime) throws IOException
{
return (!(lastModificationTime == null || modificationTime > lastModificationTime));
}
/**
* Accepts file which match a regular pattern.
*
* @param filePathStr file path
* @return true if the path matches the pattern; false otherwise;
*/
protected boolean acceptFile(String filePathStr)
{
if (fs.getScheme().equalsIgnoreCase("hdfs") && filePathStr.endsWith(FILE_BEING_COPIED)) {
return false;
}
if (regex != null) {
Matcher matcher = regex.matcher(filePathStr);
if (!matcher.matches()) {
return false;
}
}
if (ignoreRegex != null) {
Matcher matcher = ignoreRegex.matcher(filePathStr);
if (matcher.matches()) {
return false;
}
}
return true;
}
public FileInfo pollFile()
{
return discoveredFiles.poll();
}
protected int getNumDiscoveredPerIteration()
{
return numDiscoveredPerIteration;
}
/**
* Gets the regular expression for file names to split.
*
* @return regular expression
*/
public String getFilePatternRegularExp()
{
return filePatternRegularExp;
}
/**
* Only files with names matching the given Java regular expression are split.
*
* @param filePatternRegexp regular expression
*/
public void setFilePatternRegularExp(String filePatternRegexp)
{
this.filePatternRegularExp = filePatternRegexp;
}
/**
* @return the regular expression for ignored files.
*/
public String getIgnoreFilePatternRegularExp()
{
return ignoreFilePatternRegularExp;
}
/**
* Sets the regular expression for files that should be ignored.
*
* @param ignoreFilePatternRegex regular expression for files that will be ignored.
*/
public void setIgnoreFilePatternRegularExp(String ignoreFilePatternRegex)
{
this.ignoreFilePatternRegularExp = ignoreFilePatternRegex;
}
/**
* A comma separated list of directories to scan. If the path is not fully qualified the default
* file system is used. A fully qualified path can be provided to scan directories in other filesystems.
*
* @param files files
*/
public void setFiles(String files)
{
Iterables.addAll(this.files, Splitter.on(",").omitEmptyStrings().split(files));
}
/**
* Gets the files to be scanned.
*
* @return files to be scanned.
*/
public String getFiles()
{
return Joiner.on(",").join(this.files);
}
/**
* Sets whether the scan is recursive.
*
* @param recursive true if recursive; false otherwise.
*/
public void setRecursive(boolean recursive)
{
this.recursive = recursive;
}
/**
* Returns whether the scan is recursive.
*
* @return true if recursive; false otherwise.
*/
public boolean isRecursive()
{
return this.recursive;
}
/**
* Sets the trigger which will initiate scan.
*
* @param trigger
*/
public void setTrigger(boolean trigger)
{
this.trigger = trigger;
}
/**
* The trigger which will initiate scan.
*
* @return trigger
*/
public boolean isTrigger()
{
return this.trigger;
}
/**
* Returns the frequency with which new files are scanned for in milliseconds.
*
* @return The scan interval in milliseconds.
*/
public long getScanIntervalMillis()
{
return scanIntervalMillis;
}
/**
* Sets the frequency with which new files are scanned for in milliseconds.
*
* @param scanIntervalMillis The scan interval in milliseconds.
*/
public void setScanIntervalMillis(long scanIntervalMillis)
{
this.scanIntervalMillis = scanIntervalMillis;
}
}
/**
* File info created for files discovered by scanner
*/
public static class ScannedFileInfo extends AbstractFileSplitter.FileInfo
{
protected final long modifiedTime;
protected ScannedFileInfo()
{
super();
modifiedTime = -1;
}
public ScannedFileInfo(@Nullable String directoryPath, @NotNull String relativeFilePath, long modifiedTime)
{
super(directoryPath, relativeFilePath);
this.modifiedTime = modifiedTime;
}
public long getModifiedTime()
{
return modifiedTime;
}
}
private static final Logger LOG = LoggerFactory.getLogger(FileSplitterInput.class);
}
| APEXMALHAR-2312 Fix NullPointerException for FileSplitterInput Operator if filepath is specified.
Problem Description:
-------------------
1) The TimeBasedDirectoryScanner threads, which are part of the scan service, scan the configured directories/files.
2) Each thread uses the isIterationCompleted() method (backed by the referenceTimes map) to check whether the entries scanned in the last iteration have been processed by the operator thread.
3) This used to work because HashMap (referenceTimes) returned null even when the last scanned directory path was null.
4) referenceTimes was recently changed to a ConcurrentHashMap, whose get() method does not accept null keys.
5) Hence a NullPointerException is thrown: when only a file path is provided, the directory path is null, so the lookup key is null.
Solution:
---------
Pre-check whether the directory path is null; if only a file path is provided, a null directory path means the last iteration has completed.
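For context, the following is a minimal, self-contained sketch (not code from FileSplitterInput; the names NullKeyLookupSketch and lastScannedPath are illustrative only, and the completion condition is simplified) showing why the switch to ConcurrentHashMap surfaces the NullPointerException and how a null pre-check avoids it:
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class NullKeyLookupSketch {
public static void main(String[] args) {
String lastScannedPath = null; // only a file path was configured, so there is no directory key
Map<String, Long> plainMap = new HashMap<>();
System.out.println(plainMap.get(lastScannedPath)); // HashMap tolerates null keys and simply prints "null"
Map<String, Long> concurrentMap = new ConcurrentHashMap<>();
// concurrentMap.get(lastScannedPath); // would throw NullPointerException: ConcurrentHashMap rejects null keys
// Guard the lookup so a null directory path is treated as "iteration completed"
boolean iterationCompleted = (lastScannedPath == null) || concurrentMap.get(lastScannedPath) != null;
System.out.println("iterationCompleted = " + iterationCompleted);
}
}
This mirrors the pre-check on the directory path described in the solution above: the map is only consulted when the key is known to be non-null.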
| library/src/main/java/com/datatorrent/lib/io/fs/FileSplitterInput.java | APEXMALHAR-2312 Fix NullPointerException for FileSplitterInput Operator if filepath is specified. |
|
Java | apache-2.0 | c17e8e537968092547daaa4872dfb9100d89895f | 0 | obidea/semantika | /*
* Copyright (c) 2013-2014 Josef Hardi <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.obidea.semantika.mapping.parser.r2rml;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import io.github.johardi.r2rmlparser.document.IMappingVisitor;
import io.github.johardi.r2rmlparser.document.LogicalTable;
import io.github.johardi.r2rmlparser.document.ObjectMap;
import io.github.johardi.r2rmlparser.document.PredicateMap;
import io.github.johardi.r2rmlparser.document.PredicateObjectMap;
import io.github.johardi.r2rmlparser.document.RefObjectMap;
import io.github.johardi.r2rmlparser.document.SubjectMap;
import io.github.johardi.r2rmlparser.document.TermMap;
import com.obidea.semantika.datatype.DataType;
import com.obidea.semantika.exception.SemantikaRuntimeException;
import com.obidea.semantika.expression.base.ITerm;
import com.obidea.semantika.expression.base.Literal;
import com.obidea.semantika.expression.base.UriReference;
import com.obidea.semantika.mapping.IMappingFactory.IMetaModel;
import com.obidea.semantika.mapping.base.ClassMapping;
import com.obidea.semantika.mapping.base.PropertyMapping;
import com.obidea.semantika.mapping.base.sql.SqlColumn;
import com.obidea.semantika.mapping.parser.AbstractMappingHandler;
import com.obidea.semantika.mapping.parser.R2RmlVocabulary;
public class R2RmlMappingHandler extends AbstractMappingHandler implements IMappingVisitor
{
public R2RmlMappingHandler(IMetaModel metaModel)
{
super(metaModel);
}
@Override
public void visit(LogicalTable arg)
{
setSqlQuery(arg.getTableView().getSqlQuery());
}
@Override
public void visit(SubjectMap arg)
{
validateSubjectMap(arg);
setClassUri(arg.getClassIri());
int termMap = arg.getType();
String value = arg.getValue();
switch (termMap) {
case TermMap.COLUMN_VALUE:
throw new SemantikaRuntimeException("Subject map cannot use column-valued term map"); //$NON-NLS-1$
case TermMap.CONSTANT_VALUE:
setSubjectMapValue(getExpressionObjectFactory().getUriReference(createUri(value)));
break;
case TermMap.TEMPLATE_VALUE:
R2RmlTemplate template = new R2RmlTemplate(value);
List<SqlColumn> parameters = getColumnTerms(template.getColumnNames());
setSubjectMapValue(getMappingObjectFactory().createUriTemplate(template.getTemplateString(), parameters));
break;
}
// Create the class mapping if a class URI specified in the mapping
if (getClassUri() != null) {
ClassMapping cm = getMappingObjectFactory().createClassMapping(getClassUri(), getSqlQuery());
cm.setSubjectMapValue(getSubjectMapValue()); // subject template
addMapping(cm);
}
}
/*
* Validation procedure based on http://www.w3.org/TR/r2rml/#termtype
*/
private void validateSubjectMap(SubjectMap arg)
{
String termType = arg.getTermType();
if (termType.equals(R2RmlVocabulary.LITERAL)) {
throw new SemantikaRuntimeException("Subject map cannot have term type rr:Literal"); //$NON-NLS-1$
}
}
@Override
public void visit(PredicateObjectMap arg)
{
arg.getPredicateMap().accept(this);
arg.getObjectMap().accept(this);
PropertyMapping pm = getMappingObjectFactory().createPropertyMapping(getPropertyUri(), getSqlQuery());
pm.setSubjectMapValue(getSubjectMapValue());
pm.setObjectMapValue(getObjectMapValue());
addMapping(pm);
}
@Override
public void visit(PredicateMap arg)
{
validatePredicateMap(arg);
int termMap = arg.getType();
String value = arg.getValue();
switch (termMap) {
case TermMap.COLUMN_VALUE:
throw new SemantikaRuntimeException("Predicate map cannot use column-valued term map"); //$NON-NLS-1$
case TermMap.CONSTANT_VALUE:
setPredicateMapValue(getExpressionObjectFactory().getUriReference(createUri(value)));
break;
case TermMap.TEMPLATE_VALUE:
throw new SemantikaRuntimeException("Predicate map cannot use template-valued term map"); //$NON-NLS-1$
}
}
/*
* Validation procedure based on http://www.w3.org/TR/r2rml/#termtype
*/
private void validatePredicateMap(PredicateMap arg)
{
String termType = arg.getTermType();
if (termType.equals(R2RmlVocabulary.LITERAL)) {
throw new SemantikaRuntimeException("Subject map cannot have term type rr:Literal"); //$NON-NLS-1$
}
else if (termType.equals(R2RmlVocabulary.BLANK_NODE)) {
throw new SemantikaRuntimeException("Subject map cannot have term type rr:BlankNode"); //$NON-NLS-1$
}
}
@Override
public void visit(ObjectMap arg)
{
int termMap = arg.getType();
String value = arg.getValue();
String termType = arg.getTermType();
String datatype = arg.getDatatype();
switch (termMap) {
case TermMap.COLUMN_VALUE:
setObjectMapValue(getColumnTerm(value, termType, datatype));
break;
case TermMap.CONSTANT_VALUE:
setObjectMapValue(getLiteralTerm(value, termType, datatype));
break;
case TermMap.TEMPLATE_VALUE:
R2RmlTemplate template = new R2RmlTemplate(value);
String templateString = template.getTemplateString();
List<SqlColumn> parameters = getColumnTerms(template.getColumnNames());
setObjectMapValue(getMappingObjectFactory().createUriTemplate(templateString, parameters));
break;
}
}
@Override
public void visit(RefObjectMap arg)
{
// NO-OP
}
/*
* Private utility methods
*/
private URI getPropertyUri()
{
return ((UriReference) getPredicateMapValue()).toUri();
}
private SqlColumn getColumnTerm(String columnName, String termType, String datatype)
{
if (termType.equals(R2RmlVocabulary.IRI)) {
if (StringUtils.isEmpty(datatype)) {
SqlColumn column = getColumnTerm(columnName);
column.setUserDatatype(DataType.ANY_URI); // make it as an IRI object
return column;
}
else {
throw new SemantikaRuntimeException("Cannot use rr:datatype together with term type rr:IRI"); //$NON-NLS-1$
}
}
else if (termType.equals(R2RmlVocabulary.LITERAL)) {
if (StringUtils.isEmpty(datatype)) {
return getColumnTerm(columnName); // set as natural RDF literal
}
else {
SqlColumn column = getColumnTerm(columnName);
column.setUserDatatype(datatype);
return column; // set as datatype-override RDF literal
}
}
else if (termType.equals(R2RmlVocabulary.BLANK_NODE)) {
throw new SemantikaRuntimeException("Blank node is not supported yet"); //$NON-NLS-1$
}
throw new SemantikaRuntimeException("Unknown term type \"" + termType + "\""); //$NON-NLS-1$ //$NON-NLS-2$
}
private SqlColumn getColumnTerm(String columnName)
{
SqlColumn column = (SqlColumn) getSqlQuery().findSelectItemExpression(columnName);
if (column != null) {
return column;
}
throw new SemantikaRuntimeException("Unknown column name \"" + columnName + "\")"); //$NON-NLS-1$ //$NON-NLS-2$
}
private ITerm getLiteralTerm(String value, String termType, String datatype)
{
if (termType.equals(R2RmlVocabulary.IRI)) {
if (StringUtils.isEmpty(datatype)) {
UriReference uri = getExpressionObjectFactory().getUriReference(createUri(value));
return uri;
}
else {
throw new SemantikaRuntimeException("Cannot use rr:datatype together with term type rr:IRI"); //$NON-NLS-1$
}
}
else if (termType.equals(R2RmlVocabulary.LITERAL)) {
if (StringUtils.isEmpty(datatype)) {
Literal literal = getExpressionObjectFactory().getLiteral(value, DataType.STRING); // by default
return literal;
}
else {
Literal literal = getExpressionObjectFactory().getLiteral(value, datatype);
return literal;
}
}
else if (termType.equals(R2RmlVocabulary.BLANK_NODE)) {
throw new SemantikaRuntimeException("Blank node is not supported yet"); //$NON-NLS-1$
}
throw new SemantikaRuntimeException("Unknown term type \"" + termType + "\""); //$NON-NLS-1$ //$NON-NLS-2$
}
private List<SqlColumn> getColumnTerms(List<String> columnNames)
{
List<SqlColumn> toReturn = new ArrayList<SqlColumn>();
for (String columnName : columnNames) {
toReturn.add(getColumnTerm(columnName));
}
return toReturn;
}
}
| src/com/obidea/semantika/mapping/parser/r2rml/R2RmlMappingHandler.java | /*
* Copyright (c) 2013-2014 Josef Hardi <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.obidea.semantika.mapping.parser.r2rml;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import io.github.johardi.r2rmlparser.document.IMappingVisitor;
import io.github.johardi.r2rmlparser.document.LogicalTable;
import io.github.johardi.r2rmlparser.document.ObjectMap;
import io.github.johardi.r2rmlparser.document.PredicateMap;
import io.github.johardi.r2rmlparser.document.PredicateObjectMap;
import io.github.johardi.r2rmlparser.document.RefObjectMap;
import io.github.johardi.r2rmlparser.document.SubjectMap;
import io.github.johardi.r2rmlparser.document.TermMap;
import com.obidea.semantika.datatype.DataType;
import com.obidea.semantika.exception.SemantikaRuntimeException;
import com.obidea.semantika.expression.base.ITerm;
import com.obidea.semantika.expression.base.Literal;
import com.obidea.semantika.expression.base.UriReference;
import com.obidea.semantika.mapping.IMappingFactory.IMetaModel;
import com.obidea.semantika.mapping.base.ClassMapping;
import com.obidea.semantika.mapping.base.PropertyMapping;
import com.obidea.semantika.mapping.base.sql.SqlColumn;
import com.obidea.semantika.mapping.parser.AbstractMappingHandler;
import com.obidea.semantika.mapping.parser.R2RmlVocabulary;
public class R2RmlMappingHandler extends AbstractMappingHandler implements IMappingVisitor
{
public R2RmlMappingHandler(IMetaModel metaModel)
{
super(metaModel);
}
@Override
public void visit(LogicalTable arg)
{
setSqlQuery(arg.getTableView().getSqlQuery());
}
@Override
public void visit(SubjectMap arg)
{
validateSubjectMap(arg);
setClassUri(arg.getClassIri());
int termMap = arg.getType();
String value = arg.getValue();
switch (termMap) {
case TermMap.COLUMN_VALUE:
throw new SemantikaRuntimeException("Subject map cannot use column-valued term map");
case TermMap.CONSTANT_VALUE:
setSubjectMapValue(getExpressionObjectFactory().getUriReference(createUri(value)));
break;
case TermMap.TEMPLATE_VALUE:
R2RmlTemplate template = new R2RmlTemplate(value);
List<SqlColumn> parameters = getColumnTerms(template.getColumnNames());
setSubjectMapValue(getMappingObjectFactory().createUriTemplate(template.getTemplateString(), parameters));
break;
}
// Create the class mapping if a class URI specified in the mapping
if (getClassUri() != null) {
ClassMapping cm = getMappingObjectFactory().createClassMapping(getClassUri(), getSqlQuery());
cm.setSubjectMapValue(getSubjectMapValue()); // subject template
addMapping(cm);
}
}
/*
* Validation procedure based on http://www.w3.org/TR/r2rml/#termtype
*/
private void validateSubjectMap(SubjectMap arg)
{
String termType = arg.getTermType();
if (termType.equals(R2RmlVocabulary.LITERAL)) {
throw new SemantikaRuntimeException("Subject map cannot have term type rr:Literal");
}
}
@Override
public void visit(PredicateObjectMap arg)
{
arg.getPredicateMap().accept(this);
arg.getObjectMap().accept(this);
PropertyMapping pm = getMappingObjectFactory().createPropertyMapping(getPropertyUri(), getSqlQuery());
pm.setSubjectMapValue(getSubjectMapValue());
pm.setObjectMapValue(getObjectMapValue());
addMapping(pm);
}
@Override
public void visit(PredicateMap arg)
{
validatePredicateMap(arg);
int termMap = arg.getType();
String value = arg.getValue();
switch (termMap) {
case TermMap.COLUMN_VALUE:
throw new SemantikaRuntimeException("Predicate map cannot use column-valued term map");
case TermMap.CONSTANT_VALUE:
setPredicateMapValue(getExpressionObjectFactory().getUriReference(createUri(value)));
break;
case TermMap.TEMPLATE_VALUE:
throw new SemantikaRuntimeException("Predicate map cannot use template-valued term map");
}
}
/*
* Validation procedure based on http://www.w3.org/TR/r2rml/#termtype
*/
private void validatePredicateMap(PredicateMap arg)
{
String termType = arg.getTermType();
if (termType.equals(R2RmlVocabulary.LITERAL)) {
throw new SemantikaRuntimeException("Subject map cannot have term type rr:Literal");
}
else if (termType.equals(R2RmlVocabulary.BLANK_NODE)) {
throw new SemantikaRuntimeException("Subject map cannot have term type rr:BlankNode");
}
}
@Override
public void visit(ObjectMap arg)
{
int termMap = arg.getType();
String value = arg.getValue();
String termType = arg.getTermType();
String datatype = arg.getDatatype();
switch (termMap) {
case TermMap.COLUMN_VALUE:
setObjectMapValue(getColumnTerm(value, termType, datatype));
break;
case TermMap.CONSTANT_VALUE:
setObjectMapValue(getLiteralTerm(value, termType, datatype));
break;
case TermMap.TEMPLATE_VALUE:
R2RmlTemplate template = new R2RmlTemplate(value);
String templateString = template.getTemplateString();
List<SqlColumn> parameters = getColumnTerms(template.getColumnNames());
setObjectMapValue(getMappingObjectFactory().createUriTemplate(templateString, parameters));
break;
}
}
@Override
public void visit(RefObjectMap arg)
{
// NO-OP
}
/*
* Private utility methods
*/
private URI getPropertyUri()
{
return ((UriReference) getPredicateMapValue()).toUri();
}
private SqlColumn getColumnTerm(String columnName, String termType, String datatype)
{
if (termType.equals(R2RmlVocabulary.IRI)) {
if (StringUtils.isEmpty(datatype)) {
SqlColumn column = getColumnTerm(columnName);
column.setUserDatatype(DataType.ANY_URI); // make it as an IRI object
return column;
}
else {
throw new SemantikaRuntimeException("Illegal operation: Can't use rr:datatype together with term type rr:IRI");
}
}
else if (termType.equals(R2RmlVocabulary.LITERAL)) {
if (StringUtils.isEmpty(datatype)) {
return getColumnTerm(columnName); // set as natural RDF literal
}
else {
SqlColumn column = getColumnTerm(columnName);
column.setUserDatatype(datatype);
return column; // set as datatype-override RDF literal
}
}
else if (termType.equals(R2RmlVocabulary.BLANK_NODE)) {
throw new SemantikaRuntimeException("Blank node is not supported yet");
}
throw new SemantikaRuntimeException("Unknown term type \"" + termType + "\"");
}
private SqlColumn getColumnTerm(String columnName)
{
SqlColumn column = (SqlColumn) getSqlQuery().findSelectItemExpression(columnName);
if (column != null) {
return column;
}
throw new SemantikaRuntimeException("Unknown column name in template-valued term map \"" + columnName + "\")");
}
private ITerm getLiteralTerm(String value, String termType, String datatype)
{
if (termType.equals(R2RmlVocabulary.IRI)) {
if (StringUtils.isEmpty(datatype)) {
UriReference uri = getExpressionObjectFactory().getUriReference(createUri(value));
return uri;
}
else {
throw new SemantikaRuntimeException("Illegal operation: Can't use rr:datatype together with term type rr:IRI");
}
}
else if (termType.equals(R2RmlVocabulary.LITERAL)) {
if (StringUtils.isEmpty(datatype)) {
Literal literal = getExpressionObjectFactory().getLiteral(value, DataType.STRING); // by default
return literal;
}
else {
Literal literal = getExpressionObjectFactory().getLiteral(value, datatype);
return literal;
}
}
else if (termType.equals(R2RmlVocabulary.BLANK_NODE)) {
throw new SemantikaRuntimeException("Blank node is not supported yet");
}
throw new SemantikaRuntimeException("Unknown term type \"" + termType + "\"");
}
private List<SqlColumn> getColumnTerms(List<String> columnNames)
{
List<SqlColumn> toReturn = new ArrayList<SqlColumn>();
for (String columnName : columnNames) {
toReturn.add(getColumnTerm(columnName));
}
return toReturn;
}
}
| Code tidy. | src/com/obidea/semantika/mapping/parser/r2rml/R2RmlMappingHandler.java | Code tidy. |
|
Java | apache-2.0 | 62e45d489b2037303d549b3579cbc806d1deaf43 | 0 | skoulouzis/lobcder,skoulouzis/lobcder,skoulouzis/lobcder | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package nl.uva.cs.lobcder.tests;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.json.JSONConfiguration;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.contrib.ssl.EasySSLProtocolSocketFactory;
import org.apache.commons.httpclient.protocol.Protocol;
import org.apache.commons.httpclient.protocol.ProtocolSocketFactory;
import org.junit.After;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HTTPSProperties;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.security.NoSuchAlgorithmException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import org.apache.jackrabbit.webdav.DavException;
/**
*
* @author S. koulouzis
*/
public class TestREST {
private String root;
private URI uri;
private String username, password;
private HttpClient client;
private String testres1;
private String testres2;
// private String testcol;
private String restURL;
private Client restClient;
// private String testResourceId;
private String translatorURL;
private String mrURL;
private Utils utils;
private Boolean quckTest;
@Before
public void setUp() throws Exception {
// String propBasePath = System.getProperty("user.home") + File.separator
// + "workspace" + File.separator + "lobcder-tests"
// + File.separator + "etc" + File.separator + "test.properties";
String propBasePath = "etc" + File.separator + "test.properties";
Properties prop = TestSettings.getTestProperties(propBasePath);
String testURL = prop.getProperty("webdav.test.url", "http://localhost:8080/lobcder/dav");
assertTrue(testURL != null);
if (!testURL.endsWith("/")) {
testURL = testURL + "/";
}
this.uri = URI.create(testURL);
this.root = this.uri.toASCIIString();
if (!this.root.endsWith("/")) {
this.root += "/";
}
this.username = prop.getProperty(("webdav.test.username1"), "user");
assertTrue(username != null);
this.password = prop.getProperty(("webdav.test.password1"), "token0");
assertTrue(password != null);
int port = uri.getPort();
if (port == -1) {
port = 443;
}
ProtocolSocketFactory socketFactory =
new EasySSLProtocolSocketFactory();
Protocol https = new Protocol("https", socketFactory, port);
Protocol.registerProtocol("https", https);
this.client = new HttpClient();
this.client.getState().setCredentials(
new AuthScope(this.uri.getHost(), this.uri.getPort()),
new UsernamePasswordCredentials(this.username, this.password));
restURL = prop.getProperty(("rest.test.url"), "http://localhost:8080/lobcder/rest/");
// testResourceId = "testResourceId";
// testcol = this.root + testResourceId + "/";
translatorURL = prop.getProperty(("translator.test.url"), "http://localhost:8080/lobcder/urest/");
mrURL = prop.getProperty(("metadata.repository.url"), "http://vphshare.atosresearch.eu/metadata-extended/rest/metadata");
quckTest = Boolean.valueOf(prop.getProperty(("test.quick"), "true"));
ClientConfig clientConfig = configureClient();
// SSLContext ctx = SSLContext.getInstance("SSL");
// clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES, new HTTPSProperties(hostnameVerifier, ctx));
clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
restClient = Client.create(clientConfig);
restClient.addFilter(new com.sun.jersey.api.client.filter.HTTPBasicAuthFilter(username, password));
utils = new Utils(client);
}
@After
public void tearDown() throws Exception {
}
@Test
public void testQueryItems() throws IOException {
System.err.println("testQueryItems");
String testcol = root + "testResourceForQueryItems/";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + "testResourceForQueryItems" + "/file1", true);
WebResource webResource = restClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceForQueryItems");
WebResource res = webResource.path("items").path("query").queryParams(params);
// ClientResponse response = res.put(ClientResponse.class);
// assertTrue("status: " + response.getStatus(), response.getStatus() == HttpStatus.SC_OK || response.getStatus() == HttpStatus.SC_NO_CONTENT);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
LogicalDataWrapped logicalDataWrapped = null;
for (LogicalDataWrapped ldw : list) {
utils.checkLogicalDataWrapped(ldw);
if (ldw.path.equals("/testResourceForQueryItems") && ldw.logicalData.type.equals("logical.folder")) {
logicalDataWrapped = ldw;
break;
}
}
assertNotNull(logicalDataWrapped);
for (Permissions p : logicalDataWrapped.permissions) {
assertEquals(username, p.owner);
assertTrue(p.read.contains("admin"));
// for (String s : p.read) {
// System.err.println("Read:" + s);
// }
// assertNull(p.write);
// for (String s : p.write) {
// System.err.println("write:" + s);
// }
}
} finally {
utils.deleteResource(testcol, false);
}
}
@Test
public void testQueryItem() throws IOException {
System.err.println("testQueryItem");
String testcol = root + "testResourceForQueryItem/";
String testURI1 = testcol + "file1";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + testURI1, true);
WebResource webResource = restClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceForQueryItem");
WebResource res = webResource.path("items").path("query").queryParams(params);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
LogicalDataWrapped logicalDataWrapped = null;
for (LogicalDataWrapped lwd : list) {
if (lwd.logicalData.type.equals("logical.file") && lwd.logicalData.name.equals("file1")) {
logicalDataWrapped = lwd;
}
}
assertNotNull(logicalDataWrapped);
assertFalse(logicalDataWrapped.logicalData.supervised);
// assertEquals(logicalDataWrapped.logicalData.parent, "/testResourceId");
assertEquals("text/plain; charset=UTF-8", logicalDataWrapped.logicalData.contentTypesAsString);
//Get the uid
int fileUID = logicalDataWrapped.logicalData.uid;
res = webResource.path("item").path("query").path(String.valueOf(fileUID));
LogicalDataWrapped theFile = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<LogicalDataWrapped>() {
});
assertEquals(fileUID, theFile.logicalData.uid);
assertNotNull(theFile);
assertEquals(theFile.logicalData.type, "logical.file");
for (Permissions p : theFile.permissions) {
assertEquals(p.owner, username);
assertTrue(p.read.contains("admin"));
// for (String s : p.read) {
// System.err.println("Read:" + s);
// }
// assertNull(p.write);
// for (String s : p.write) {
// System.err.println("write:" + s);
// }
}
assertEquals(theFile.logicalData.name, "file1");
assertFalse(theFile.logicalData.supervised);
// assertEquals(theFile.logicalData.parent, "/testResourceId");
assertEquals("text/plain; charset=UTF-8", theFile.logicalData.contentTypesAsString);
} finally {
utils.deleteResource(testcol, false);
}
}
@Test
public void testDataItem() throws IOException {
System.err.println("testDataItem");
String testcol = root + "testResourceForDataItem/";
String testURI1 = testcol + "file1";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + testURI1, true);
WebResource webResource = restClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceForDataItem");
WebResource res = webResource.path("items").path("query").queryParams(params);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
LogicalDataWrapped logicalDataWrapped = null;
for (LogicalDataWrapped ldw : list) {
utils.checkLogicalDataWrapped(ldw);
if (ldw.logicalData.type.equals("logical.file") && ldw.logicalData.name.equals("file1")) {
logicalDataWrapped = ldw;
}
}
assertNotNull(logicalDataWrapped);
for (Permissions p : logicalDataWrapped.permissions) {
assertEquals(p.owner, username);
assertTrue(p.read.contains("admin"));
// for (String s : p.read) {
// System.err.println("Read:" + s);
// }
// assertNull(p.write);
// for (String s : p.write) {
// System.err.println("write:" + s);
// }
}
assertFalse(logicalDataWrapped.logicalData.supervised);
// assertEquals(logicalDataWrapped.logicalData.parent, "/testResourceId");
assertEquals("text/plain; charset=UTF-8", logicalDataWrapped.logicalData.contentTypesAsString);
// //Get the uid
// int fileUID = logicalDataWrapped.logicalData.uid;
// res = webResource.path("item").path("data").path(String.valueOf(fileUID));
// ClientResponse response = res.get(ClientResponse.class);
// assertTrue("status: " + response.getStatus(), response.getStatus() == HttpStatus.SC_OK);
// InputStream ins = response.getEntityInputStream();
// byte[] d = new byte[3];
// ins.read(d);
// ins.close();
// assertEquals(new String(d), "foo");
} finally {
utils.deleteResource(testcol, false);
}
}
// @Test
// public void testReservation() throws IOException {
// System.err.println("testReservation");
// try {
// createCollection();
// //Wait for replication
// Thread.sleep(15000);
//
//
// // /rest/reservation/get_workers/?id=all
// WebResource webResource = restClient.resource(restURL);
//
// //Get list of workers
// MultivaluedMap<String, String> params = new MultivaluedMapImpl();
// params.add("id", "all");
// WebResource res = webResource.path("reservation").path("get_workers").queryParams(params);
// List<WorkerStatus> workersList = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<List<WorkerStatus>>() {
// });
//
//
// //If we have workers ask for a path reservation
// if (workersList != null && workersList.size() > 0) {
// //rest/reservation/5455/request/?dataPath=/&storageSiteHost=sps1&storageSiteHost=sps2&storageSiteHost=sps3
// params = new MultivaluedMapImpl();
// String dataPath = "file1";
// params.add("dataName", dataPath);
// for (WorkerStatus w : workersList) {
// params.add("storageSiteHost", w.hostName);
// }
//
// res = webResource.path("reservation").path("some_communication_id").path("request").queryParams(params);
// ReservationInfo info = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<ReservationInfo>() {
// });
//
// assertNotNull(info);
// assertNotNull(info.communicationID);
// assertNotNull(info.storageHost);
// assertNotNull(info.storageHostIndex);
// assertNotNull(info.workerDataAccessURL);
//
//
// //Check if worker is ready
// params = new MultivaluedMapImpl();
// params.add("host", info.storageHost);
//
//
// res = webResource.path("reservation").path("workers").queryParams(params);
// List<WorkerStatus> list = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<List<WorkerStatus>>() {
// });
//
// assertNotNull(list);
// assertFalse(list.isEmpty());
// for (WorkerStatus w : list) {
// assertNotNull(w.status);
// assertNotNull(w.hostName);
// assertEquals("READY", w.status);
// }
//
// //Now get the file
// GetMethod get = new GetMethod(info.workerDataAccessURL);
// int status = client.executeMethod(get);
// assertEquals(HttpStatus.SC_OK, status);
// assertEquals("foo", get.getResponseBodyAsString());
//
//
//
//
// //run without host names
// params = new MultivaluedMapImpl();
// dataPath = "file1";
// params.add("dataName", dataPath);
// res = webResource.path("reservation").path("some_communication_id").path("request").queryParams(params);
// info = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<ReservationInfo>() {
// });
//
// assertNotNull(info);
// assertNotNull(info.communicationID);
// assertNotNull(info.storageHostIndex);
// assertNotNull(info.workerDataAccessURL);
//
//
// //Now get the file
// get = new GetMethod(info.workerDataAccessURL);
// status = client.executeMethod(get);
// assertEquals(HttpStatus.SC_OK, status);
// assertEquals("foo", get.getResponseBodyAsString());
//
// }
// } catch (Exception ex) {
// Logger.getLogger(TestREST.class.getName()).log(Level.SEVERE, null, ex);
// } finally {
// utils.deleteResource(testcol, false);
// }
// }
// @Test
// public void testGetWorkersStatus() throws IOException {
// System.err.println("testGetWorkersStatus");
// try {
// createCollection();
// WebResource webResource = restClient.resource(restURL);
//// rest/reservation/workers/?host=kscvdfv&host=sp2&host=192.168.1.1
// MultivaluedMap<String, String> params = new MultivaluedMapImpl();
// params.add("host", "host1");
// params.add("host", "host2");
// params.add("host", "host3");
//
// WebResource res = webResource.path("reservation").path("workers").queryParams(params);
// List<WorkerStatus> list = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<List<WorkerStatus>>() {
// });
//
// assertNotNull(list);
// assertFalse(list.isEmpty());
// for (WorkerStatus w : list) {
// assertNotNull(w.status);
// assertNotNull(w.hostName);
// }
//
//
// } finally {
// utils.deleteResource(testcol, false);
// }
// }
@Test
public void testTicketTranslator() throws IOException {
System.err.println("testTicketTranslator");
String testcol = root + "testResourceForTicketTranslator/";
String testURI1 = testcol + "file1";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + testURI1, true);
ClientConfig clientConfig = new DefaultClientConfig();
clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
Client nonAuthRestClient = Client.create(clientConfig);
WebResource webResource = nonAuthRestClient.resource(translatorURL);
WebResource res = webResource.path("getshort").path(password);
String shortToken = res.accept(MediaType.TEXT_PLAIN).get(String.class);
assertNotNull(shortToken);
Client shortAuthRestClient = Client.create(clientConfig);
shortAuthRestClient.addFilter(new com.sun.jersey.api.client.filter.HTTPBasicAuthFilter(username, shortToken));
webResource = shortAuthRestClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceForTicketTranslator");
res = webResource.path("items").path("query").queryParams(params);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
} finally {
utils.deleteResource(testcol, false);
}
}
@Test
public void testMetadataService() throws IOException, JAXBException {
if (quckTest) {
return;
}
System.err.println("testMetadataService");
String testcol = root + "testResourceForMetadataService/";
String testURI1 = testcol + "file1";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + testURI1, true);
WebResource webResource = restClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceId");
WebResource res = webResource.path("items").path("query").queryParams(params);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
LogicalDataWrapped logicalDataWrapped = null;
Client mrClient = Client.create();
for (LogicalDataWrapped ldw : list) {
utils.checkLogicalDataWrapped(ldw);
if (ldw.logicalData.type.equals("logical.file") && ldw.logicalData.name.equals("file1")) {
logicalDataWrapped = ldw;
}
params = new MultivaluedMapImpl();
params.add("logicalExpression", "name=%22" + ldw.logicalData.name + "%22");
params.add("logicalExpression", "description=%22LOBCDER%22");
webResource = mrClient.resource(mrURL).path("filter").queryParams(params);
Thread.sleep(30000);
String response = webResource.get(String.class);
String idStr = response.substring(response.indexOf("<localID>") + "<localID>".length(), response.indexOf("</localID>"));
assertEquals(Integer.valueOf(ldw.logicalData.uid), Integer.valueOf(idStr));
System.err.println(ldw.logicalData.name + ": ok");
}
assertNotNull(logicalDataWrapped);
} catch (InterruptedException ex) {
Logger.getLogger(TestREST.class.getName()).log(Level.SEVERE, null, ex);
} finally {
utils.deleteResource(testcol, false);
}
}
@Test
public void testSetSpeed() throws JAXBException {
System.err.println("testSetSpeed");
Stats stats = new Stats();
stats.destination = "192.168.100.5";
stats.source = "192.168.100.1";
stats.size = Long.valueOf(102400);
stats.speed = 11.5;
JAXBContext context = JAXBContext.newInstance(Stats.class);
Marshaller m = context.createMarshaller();
m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
OutputStream out = new ByteArrayOutputStream();
m.marshal(stats, out);
WebResource webResource = restClient.resource(restURL);
String stringStats = String.valueOf(out);
ClientResponse response = webResource.path("lob_statistics").path("set")
.type(MediaType.APPLICATION_XML).put(ClientResponse.class, stringStats);
if (response.getClientResponseStatus() != ClientResponse.Status.NO_CONTENT) {
fail();
}
// fail();
}
@Test
public void testArchiveService() throws JAXBException, IOException, DavException, NoSuchAlgorithmException {
System.err.println("testArchiveService");
String testcol = root + "testResourceForArchiveService/";
String testFileURI1 = testcol + TestSettings.TEST_FILE_NAME1;
List<File> unzipedFiles = null;
File randomFile = null;
try {
utils.deleteResource(testcol, false);
utils.createCollection(testcol, true);
randomFile = utils.createRandomFile("/tmp/" + TestSettings.TEST_FILE_NAME1, 1);
//If the destination is set to this.root+testResourceId + "/file1" someone is asking for /login.html ???!!!!
utils.postFile(randomFile, testcol);
String localFileChecksum = utils.getChecksum(randomFile, "SHA1");
utils.waitForReplication(testFileURI1);
File zipFile = utils.DownloadFile(restURL + "/compress/getzip/testResourceForArchiveService", "/tmp/testResourceForArchiveService.zip", true);
unzipedFiles = utils.unzipFile(zipFile);
for (File f : unzipedFiles) {
String checksumFromDownloaded = utils.getChecksum(f, "SHA1");
assertEquals(localFileChecksum, checksumFromDownloaded);
}
} catch (InterruptedException ex) {
Logger.getLogger(TestREST.class.getName()).log(Level.SEVERE, null, ex);
} finally {
utils.deleteResource(testcol, false);
if (unzipedFiles != null && !unzipedFiles.isEmpty()) {
for (File f : unzipedFiles) {
if (f != null) {
f.delete();
}
}
}
if (randomFile != null) {
randomFile.delete();
}
}
}
@Test
public void testTTLService() throws JAXBException, IOException, DavException, InterruptedException {
if (quckTest) {
return;
}
System.err.println("testTTLService");
String testcol = root + "testResourceForTTLService/";
try {
utils.createCollection(testcol, true);
Long uid = utils.getResourceUID(testcol);
WebResource webResource = restClient.resource(restURL);
WebResource res = webResource.path("ttl").path(String.valueOf(uid)).path("3");
ClientResponse response = res.put(ClientResponse.class);
assertTrue("status: " + response.getStatus(), response.getStatus() == HttpStatus.SC_OK || response.getStatus() == HttpStatus.SC_NO_CONTENT);
// PUT https://lobcder.vph.cyfronet.pl/lobcder/rest/ttl/{uid}/{ttl}
int count = 0;
while (utils.resourceExists(testcol)) {
count++;
if (count > 200) {
fail("Resource " + testcol + " is not deleted. It should be gone");
break;
}
Thread.sleep(20000);
}
utils.deleteResource(testcol, false);
utils.createCollection(testcol, true);
webResource = restClient.resource(restURL);
//PUT https://lobcder.vph.cyfronet.pl/lobcder/rest/ttl/{ttl}?path=/path/to/entry
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceForTTLService");
res = webResource.path("ttl").path(String.valueOf("3")).queryParams(params);
response = res.put(ClientResponse.class);
assertTrue("status: " + response.getStatus(), response.getStatus() == HttpStatus.SC_OK || response.getStatus() == HttpStatus.SC_NO_CONTENT);
count = 0;
while (utils.resourceExists(testcol)) {
count++;
if (count > 200) {
fail("Resource " + testcol + " is not deleted. It should be gone");
break;
}
Thread.sleep(20000);
}
} finally {
utils.deleteResource(testcol, false);
}
}
public static ClientConfig configureClient() {
TrustManager[] certs = new TrustManager[]{
new X509TrustManager() {
@Override
public X509Certificate[] getAcceptedIssuers() {
return null;
}
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType)
throws CertificateException {
}
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType)
throws CertificateException {
}
}
};
SSLContext ctx = null;
try {
ctx = SSLContext.getInstance("TLS");
ctx.init(null, certs, new SecureRandom());
} catch (java.security.GeneralSecurityException ex) {
}
HttpsURLConnection.setDefaultSSLSocketFactory(ctx.getSocketFactory());
ClientConfig config = new DefaultClientConfig();
try {
config.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES, new HTTPSProperties(
new HostnameVerifier() {
@Override
public boolean verify(String hostname, SSLSession session) {
return true;
}
},
ctx));
} catch (Exception e) {
}
return config;
}
@XmlRootElement
public static class LogicalDataWrapped {
public LogicalData logicalData;
public String path;
public Set<PDRIDesc> pdriList;
public Set<Permissions> permissions;
}
@XmlRootElement
public static class LogicalData {
public int checksum;
public String contentTypesAsString;
public long createDate;
public long lastValidationDate;
public long length;
public int lockTimeout;
public long modifiedDate;
public String name;
public String owner;
public int parentRef;
public int pdriGroupId;
public boolean supervised;
public String type;
public int uid;
}
@XmlRootElement
public static class Permissions {
public String owner;
public Set<String> read;
public Set<String> write;
}
@XmlRootElement
public static class PDRIDesc {
public String name;
public String password;
public String resourceUrl;
public String username;
}
@XmlRootElement
public static class ReservationInfo {
@XmlElement(name = "communicationID")
private String communicationID;
@XmlElement(name = "storageHost")
private String storageHost;
@XmlElement(name = "storageHostIndex")
private int storageHostIndex;
@XmlElement(name = "workerDataAccessURL")
private String workerDataAccessURL;
}
@XmlRootElement
public static class WorkerStatus {
@XmlElement(name = "hostName")
private String hostName;
@XmlElement(name = "status")
private String status;
}
@XmlRootElement
public static class Stats {
@XmlElement(name = "source")
String source;
@XmlElement(name = "destination")
String destination;
@XmlElement(name = "size")
Long size;
@XmlElement(name = "speed")
Double speed;
}
}
| lobcder-tests/src/test/java/nl/uva/cs/lobcder/tests/TestREST.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package nl.uva.cs.lobcder.tests;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.json.JSONConfiguration;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScope;
import org.apache.commons.httpclient.contrib.ssl.EasySSLProtocolSocketFactory;
import org.apache.commons.httpclient.protocol.Protocol;
import org.apache.commons.httpclient.protocol.ProtocolSocketFactory;
import org.junit.After;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HTTPSProperties;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.security.NoSuchAlgorithmException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import org.apache.jackrabbit.webdav.DavException;
/**
*
* @author S. koulouzis
*/
public class TestREST {
private String root;
private URI uri;
private String username, password;
private HttpClient client;
private String testres1;
private String testres2;
// private String testcol;
private String restURL;
private Client restClient;
// private String testResourceId;
private String translatorURL;
private String mrURL;
private Utils utils;
private Boolean quckTest;
@Before
public void setUp() throws Exception {
// String propBasePath = System.getProperty("user.home") + File.separator
// + "workspace" + File.separator + "lobcder-tests"
// + File.separator + "etc" + File.separator + "test.properties";
String propBasePath = "etc" + File.separator + "test.properties";
Properties prop = TestSettings.getTestProperties(propBasePath);
String testURL = prop.getProperty("webdav.test.url", "http://localhost:8080/lobcder/dav");
assertTrue(testURL != null);
if (!testURL.endsWith("/")) {
testURL = testURL + "/";
}
this.uri = URI.create(testURL);
this.root = this.uri.toASCIIString();
if (!this.root.endsWith("/")) {
this.root += "/";
}
this.username = prop.getProperty(("webdav.test.username1"), "user");
assertTrue(username != null);
this.password = prop.getProperty(("webdav.test.password1"), "token0");
assertTrue(password != null);
int port = uri.getPort();
if (port == -1) {
port = 443;
}
ProtocolSocketFactory socketFactory =
new EasySSLProtocolSocketFactory();
Protocol https = new Protocol("https", socketFactory, port);
Protocol.registerProtocol("https", https);
this.client = new HttpClient();
this.client.getState().setCredentials(
new AuthScope(this.uri.getHost(), this.uri.getPort()),
new UsernamePasswordCredentials(this.username, this.password));
restURL = prop.getProperty(("rest.test.url"), "http://localhost:8080/lobcder/rest/");
// testResourceId = "testResourceId";
// testcol = this.root + testResourceId + "/";
translatorURL = prop.getProperty(("translator.test.url"), "http://localhost:8080/lobcder/urest/");
mrURL = prop.getProperty(("metadata.repository.url"), "http://vphshare.atosresearch.eu/metadata-extended/rest/metadata");
quckTest = Boolean.valueOf(prop.getProperty(("test.quick"), "true"));
ClientConfig clientConfig = configureClient();
// SSLContext ctx = SSLContext.getInstance("SSL");
// clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES, new HTTPSProperties(hostnameVerifier, ctx));
clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
restClient = Client.create(clientConfig);
restClient.addFilter(new com.sun.jersey.api.client.filter.HTTPBasicAuthFilter(username, password));
utils = new Utils(client);
}
@After
public void tearDown() throws Exception {
}
@Test
public void testQueryItems() throws IOException {
System.err.println("testQueryItems");
String testcol = root + "testResourceForQueryItems/";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + "testResourceForQueryItems" + "/file1", true);
WebResource webResource = restClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceForQueryItems");
WebResource res = webResource.path("items").path("query").queryParams(params);
// ClientResponse response = res.put(ClientResponse.class);
// assertTrue("status: " + response.getStatus(), response.getStatus() == HttpStatus.SC_OK || response.getStatus() == HttpStatus.SC_NO_CONTENT);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
LogicalDataWrapped logicalDataWrapped = null;
for (LogicalDataWrapped ldw : list) {
utils.checkLogicalDataWrapped(ldw);
if (ldw.path.equals("/testResourceForQueryItems") && ldw.logicalData.type.equals("logical.folder")) {
logicalDataWrapped = ldw;
break;
}
}
assertNotNull(logicalDataWrapped);
for (Permissions p : logicalDataWrapped.permissions) {
assertEquals(username, p.owner);
assertTrue(p.read.contains("admin"));
// for (String s : p.read) {
// System.err.println("Read:" + s);
// }
// assertNull(p.write);
// for (String s : p.write) {
// System.err.println("write:" + s);
// }
}
} finally {
utils.deleteResource(testcol, false);
}
}
@Test
public void testQueryItem() throws IOException {
System.err.println("testQueryItem");
String testcol = root + "testResourceForQueryItem/";
String testURI1 = testcol + "file1";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + testURI1, true);
WebResource webResource = restClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceId");
WebResource res = webResource.path("items").path("query").queryParams(params);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
LogicalDataWrapped logicalDataWrapped = null;
for (LogicalDataWrapped lwd : list) {
if (lwd.logicalData.type.equals("logical.file") && lwd.logicalData.name.equals("file1")) {
logicalDataWrapped = lwd;
}
}
assertNotNull(logicalDataWrapped);
assertFalse(logicalDataWrapped.logicalData.supervised);
// assertEquals(logicalDataWrapped.logicalData.parent, "/testResourceId");
assertEquals("text/plain; charset=UTF-8", logicalDataWrapped.logicalData.contentTypesAsString);
//Get the uid
int fileUID = logicalDataWrapped.logicalData.uid;
res = webResource.path("item").path("query").path(String.valueOf(fileUID));
LogicalDataWrapped theFile = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<LogicalDataWrapped>() {
});
assertEquals(fileUID, theFile.logicalData.uid);
assertNotNull(theFile);
assertEquals(theFile.logicalData.type, "logical.file");
for (Permissions p : theFile.permissions) {
assertEquals(p.owner, username);
assertTrue(p.read.contains("admin"));
// for (String s : p.read) {
// System.err.println("Read:" + s);
// }
// assertNull(p.write);
// for (String s : p.write) {
// System.err.println("write:" + s);
// }
}
assertEquals(theFile.logicalData.name, "file1");
assertFalse(theFile.logicalData.supervised);
// assertEquals(theFile.logicalData.parent, "/testResourceId");
assertEquals("text/plain; charset=UTF-8", theFile.logicalData.contentTypesAsString);
} finally {
utils.deleteResource(testcol, false);
}
}
@Test
public void testDataItem() throws IOException {
System.err.println("testDataItem");
String testcol = root + "testResourceForDataItem/";
String testURI1 = testcol + "file1";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + testURI1, true);
WebResource webResource = restClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceId");
WebResource res = webResource.path("items").path("query").queryParams(params);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
LogicalDataWrapped logicalDataWrapped = null;
for (LogicalDataWrapped ldw : list) {
utils.checkLogicalDataWrapped(ldw);
if (ldw.logicalData.type.equals("logical.file") && ldw.logicalData.name.equals("file1")) {
logicalDataWrapped = ldw;
}
}
assertNotNull(logicalDataWrapped);
for (Permissions p : logicalDataWrapped.permissions) {
assertEquals(p.owner, username);
assertTrue(p.read.contains("admin"));
// for (String s : p.read) {
// System.err.println("Read:" + s);
// }
// assertNull(p.write);
// for (String s : p.write) {
// System.err.println("write:" + s);
// }
}
assertFalse(logicalDataWrapped.logicalData.supervised);
// assertEquals(logicalDataWrapped.logicalData.parent, "/testResourceId");
assertEquals("text/plain; charset=UTF-8", logicalDataWrapped.logicalData.contentTypesAsString);
// //Get the uid
// int fileUID = logicalDataWrapped.logicalData.uid;
// res = webResource.path("item").path("data").path(String.valueOf(fileUID));
// ClientResponse response = res.get(ClientResponse.class);
// assertTrue("status: " + response.getStatus(), response.getStatus() == HttpStatus.SC_OK);
// InputStream ins = response.getEntityInputStream();
// byte[] d = new byte[3];
// ins.read(d);
// ins.close();
// assertEquals(new String(d), "foo");
} finally {
utils.deleteResource(testcol, false);
}
}
// @Test
// public void testReservation() throws IOException {
// System.err.println("testReservation");
// try {
// createCollection();
// //Wait for replication
// Thread.sleep(15000);
//
//
// // /rest/reservation/get_workers/?id=all
// WebResource webResource = restClient.resource(restURL);
//
// //Get list of workers
// MultivaluedMap<String, String> params = new MultivaluedMapImpl();
// params.add("id", "all");
// WebResource res = webResource.path("reservation").path("get_workers").queryParams(params);
// List<WorkerStatus> workersList = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<List<WorkerStatus>>() {
// });
//
//
// //If we have workers ask for a path reservation
// if (workersList != null && workersList.size() > 0) {
// //rest/reservation/5455/request/?dataPath=/&storageSiteHost=sps1&storageSiteHost=sps2&storageSiteHost=sps3
// params = new MultivaluedMapImpl();
// String dataPath = "file1";
// params.add("dataName", dataPath);
// for (WorkerStatus w : workersList) {
// params.add("storageSiteHost", w.hostName);
// }
//
// res = webResource.path("reservation").path("some_communication_id").path("request").queryParams(params);
// ReservationInfo info = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<ReservationInfo>() {
// });
//
// assertNotNull(info);
// assertNotNull(info.communicationID);
// assertNotNull(info.storageHost);
// assertNotNull(info.storageHostIndex);
// assertNotNull(info.workerDataAccessURL);
//
//
// //Check if worker is ready
// params = new MultivaluedMapImpl();
// params.add("host", info.storageHost);
//
//
// res = webResource.path("reservation").path("workers").queryParams(params);
// List<WorkerStatus> list = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<List<WorkerStatus>>() {
// });
//
// assertNotNull(list);
// assertFalse(list.isEmpty());
// for (WorkerStatus w : list) {
// assertNotNull(w.status);
// assertNotNull(w.hostName);
// assertEquals("READY", w.status);
// }
//
// //Now get the file
// GetMethod get = new GetMethod(info.workerDataAccessURL);
// int status = client.executeMethod(get);
// assertEquals(HttpStatus.SC_OK, status);
// assertEquals("foo", get.getResponseBodyAsString());
//
//
//
//
// //run without host names
// params = new MultivaluedMapImpl();
// dataPath = "file1";
// params.add("dataName", dataPath);
// res = webResource.path("reservation").path("some_communication_id").path("request").queryParams(params);
// info = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<ReservationInfo>() {
// });
//
// assertNotNull(info);
// assertNotNull(info.communicationID);
// assertNotNull(info.storageHostIndex);
// assertNotNull(info.workerDataAccessURL);
//
//
// //Now get the file
// get = new GetMethod(info.workerDataAccessURL);
// status = client.executeMethod(get);
// assertEquals(HttpStatus.SC_OK, status);
// assertEquals("foo", get.getResponseBodyAsString());
//
// }
// } catch (Exception ex) {
// Logger.getLogger(TestREST.class.getName()).log(Level.SEVERE, null, ex);
// } finally {
// utils.deleteResource(testcol, false);
// }
// }
// @Test
// public void testGetWorkersStatus() throws IOException {
// System.err.println("testGetWorkersStatus");
// try {
// createCollection();
// WebResource webResource = restClient.resource(restURL);
//// rest/reservation/workers/?host=kscvdfv&host=sp2&host=192.168.1.1
// MultivaluedMap<String, String> params = new MultivaluedMapImpl();
// params.add("host", "host1");
// params.add("host", "host2");
// params.add("host", "host3");
//
// WebResource res = webResource.path("reservation").path("workers").queryParams(params);
// List<WorkerStatus> list = res.accept(MediaType.APPLICATION_XML).
// get(new GenericType<List<WorkerStatus>>() {
// });
//
// assertNotNull(list);
// assertFalse(list.isEmpty());
// for (WorkerStatus w : list) {
// assertNotNull(w.status);
// assertNotNull(w.hostName);
// }
//
//
// } finally {
// utils.deleteResource(testcol, false);
// }
// }
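// Exchanges the password for a short-lived token at the ticket translator service and verifies
// that the token works as a basic-auth credential against the items/query endpoint.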
@Test
public void testTicketTranslator() throws IOException {
System.err.println("testTicketTranslator");
String testcol = root + "testResourceForTicketTranslator/";
String testURI1 = testcol + "file1";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + testURI1, true);
ClientConfig clientConfig = new DefaultClientConfig();
clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
Client nonAuthRestClient = Client.create(clientConfig);
WebResource webResource = nonAuthRestClient.resource(translatorURL);
WebResource res = webResource.path("getshort").path(password);
String shortToken = res.accept(MediaType.TEXT_PLAIN).get(String.class);
assertNotNull(shortToken);
Client shortAuthRestClient = Client.create(clientConfig);
shortAuthRestClient.addFilter(new com.sun.jersey.api.client.filter.HTTPBasicAuthFilter(username, shortToken));
webResource = shortAuthRestClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceId");
res = webResource.path("items").path("query").queryParams(params);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
} finally {
utils.deleteResource(testcol, false);
}
}
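// Verifies that catalogued entries reach the metadata service: queries it with name/description
// filters and compares the returned localID against the LOBCDER uid. Skipped in quick-test mode.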
@Test
public void testMetadataService() throws IOException, JAXBException {
if (quckTest) {
return;
}
System.err.println("testMetadataService");
String testcol = root + "testResourceForMetadataService/";
String testURI1 = testcol + "file1";
try {
utils.createCollection(testcol, true);
utils.createFile(this.root + testURI1, true);
WebResource webResource = restClient.resource(restURL);
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceId");
WebResource res = webResource.path("items").path("query").queryParams(params);
List<LogicalDataWrapped> list = res.accept(MediaType.APPLICATION_XML).
get(new GenericType<List<LogicalDataWrapped>>() {
});
assertNotNull(list);
assertFalse(list.isEmpty());
LogicalDataWrapped logicalDataWrapped = null;
Client mrClient = Client.create();
for (LogicalDataWrapped ldw : list) {
utils.checkLogicalDataWrapped(ldw);
if (ldw.logicalData.type.equals("logical.file") && ldw.logicalData.name.equals("file1")) {
logicalDataWrapped = ldw;
}
params = new MultivaluedMapImpl();
params.add("logicalExpression", "name=%22" + ldw.logicalData.name + "%22");
params.add("logicalExpression", "description=%22LOBCDER%22");
webResource = mrClient.resource(mrURL).path("filter").queryParams(params);
Thread.sleep(30000);
String response = webResource.get(String.class);
String idStr = response.substring(response.indexOf("<localID>") + "<localID>".length(), response.indexOf("</localID>"));
assertEquals(Integer.valueOf(ldw.logicalData.uid), Integer.valueOf(idStr));
System.err.println(ldw.logicalData.name + ": ok");
}
assertNotNull(logicalDataWrapped);
} catch (InterruptedException ex) {
Logger.getLogger(TestREST.class.getName()).log(Level.SEVERE, null, ex);
} finally {
utils.deleteResource(testcol, false);
}
}
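// Marshals a Stats sample to XML and PUTs it to lob_statistics/set, expecting a 204 No Content reply.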
@Test
public void testSetSpeed() throws JAXBException {
System.err.println("testSetSpeed");
Stats stats = new Stats();
stats.destination = "192.168.100.5";
stats.source = "192.168.100.1";
stats.size = Long.valueOf(102400);
stats.speed = 11.5;
JAXBContext context = JAXBContext.newInstance(Stats.class);
Marshaller m = context.createMarshaller();
m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
OutputStream out = new ByteArrayOutputStream();
m.marshal(stats, out);
WebResource webResource = restClient.resource(restURL);
String stringStats = String.valueOf(out);
ClientResponse response = webResource.path("lob_statistics").path("set")
.type(MediaType.APPLICATION_XML).put(ClientResponse.class, stringStats);
if (response.getClientResponseStatus() != ClientResponse.Status.NO_CONTENT) {
fail();
}
// fail();
}
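// Uploads a random file, waits for replication, downloads the collection through compress/getzip
// and checks that the SHA1 checksum of each unzipped file matches the original upload.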
@Test
public void testArchiveService() throws JAXBException, IOException, DavException, NoSuchAlgorithmException {
System.err.println("testArchiveService");
String testcol = root + "testResourceForArchiveService/";
String testFileURI1 = testcol + TestSettings.TEST_FILE_NAME1;
List<File> unzipedFiles = null;
File randomFile = null;
try {
utils.deleteResource(testcol, false);
utils.createCollection(testcol, true);
randomFile = utils.createRandomFile("/tmp/" + TestSettings.TEST_FILE_NAME1, 1);
//If the destination is set to this.root+testResourceId + "/file1" someone is asking for /login.html ???!!!!
utils.postFile(randomFile, testcol);
String localFileChecksum = utils.getChecksum(randomFile, "SHA1");
utils.waitForReplication(testFileURI1);
File zipFile = utils.DownloadFile(restURL + "/compress/getzip/testResourceForArchiveService", "/tmp/testResourceForArchiveService.zip", true);
unzipedFiles = utils.unzipFile(zipFile);
for (File f : unzipedFiles) {
String checksumFromDownloaded = utils.getChecksum(f, "SHA1");
assertEquals(localFileChecksum, checksumFromDownloaded);
}
} catch (InterruptedException ex) {
Logger.getLogger(TestREST.class.getName()).log(Level.SEVERE, null, ex);
} finally {
utils.deleteResource(testcol, false);
if (unzipedFiles != null && !unzipedFiles.isEmpty()) {
for (File f : unzipedFiles) {
if (f != null) {
f.delete();
}
}
}
if (randomFile != null) {
randomFile.delete();
}
}
}
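// Sets a time-to-live on a collection, first by uid and then by path, and polls until the
// resource has been removed. Skipped in quick-test mode.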
@Test
public void testTTLService() throws JAXBException, IOException, DavException, InterruptedException {
if (quckTest) {
return;
}
System.err.println("testTTLService");
String testcol = root + "testResourceForTTLService/";
try {
utils.createCollection(testcol, true);
Long uid = utils.getResourceUID(testcol);
WebResource webResource = restClient.resource(restURL);
WebResource res = webResource.path("ttl").path(String.valueOf(uid)).path("3");
ClientResponse response = res.put(ClientResponse.class);
assertTrue("status: " + response.getStatus(), response.getStatus() == HttpStatus.SC_OK || response.getStatus() == HttpStatus.SC_NO_CONTENT);
// PUT https://lobcder.vph.cyfronet.pl/lobcder/rest/ttl/{uid}/{ttl}
int count = 0;
while (utils.resourceExists(testcol)) {
count++;
if (count > 200) {
fail("Resource " + testcol + " is not deleted. It should be gone");
break;
}
Thread.sleep(20000);
}
utils.deleteResource(testcol, false);
utils.createCollection(testcol, true);
webResource = restClient.resource(restURL);
//PUT https://lobcder.vph.cyfronet.pl/lobcder/rest/ttl/{ttl}?path=/path/to/entry
MultivaluedMap<String, String> params = new MultivaluedMapImpl();
params.add("path", "/testResourceForTTLService");
res = webResource.path("ttl").path(String.valueOf("3")).queryParams(params);
response = res.put(ClientResponse.class);
assertTrue("status: " + response.getStatus(), response.getStatus() == HttpStatus.SC_OK || response.getStatus() == HttpStatus.SC_NO_CONTENT);
count = 0;
while (utils.resourceExists(testcol)) {
count++;
if (count > 200) {
fail("Resource " + testcol + " is not deleted. It should be gone");
break;
}
Thread.sleep(20000);
}
} finally {
utils.deleteResource(testcol, false);
}
}
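// Builds a Jersey ClientConfig that trusts every SSL certificate and host name, so the tests can
// talk to servers with self-signed certificates. Not suitable for production use.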
public static ClientConfig configureClient() {
TrustManager[] certs = new TrustManager[]{
new X509TrustManager() {
@Override
public X509Certificate[] getAcceptedIssuers() {
return null;
}
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType)
throws CertificateException {
}
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType)
throws CertificateException {
}
}
};
SSLContext ctx = null;
try {
ctx = SSLContext.getInstance("TLS");
ctx.init(null, certs, new SecureRandom());
} catch (java.security.GeneralSecurityException ex) {
}
HttpsURLConnection.setDefaultSSLSocketFactory(ctx.getSocketFactory());
ClientConfig config = new DefaultClientConfig();
try {
config.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES, new HTTPSProperties(
new HostnameVerifier() {
@Override
public boolean verify(String hostname, SSLSession session) {
return true;
}
},
ctx));
} catch (Exception e) {
}
return config;
}
@XmlRootElement
public static class LogicalDataWrapped {
public LogicalData logicalData;
public String path;
public Set<PDRIDesc> pdriList;
public Set<Permissions> permissions;
}
@XmlRootElement
public static class LogicalData {
public int checksum;
public String contentTypesAsString;
public long createDate;
public long lastValidationDate;
public long length;
public int lockTimeout;
public long modifiedDate;
public String name;
public String owner;
public int parentRef;
public int pdriGroupId;
public boolean supervised;
public String type;
public int uid;
}
@XmlRootElement
public static class Permissions {
public String owner;
public Set<String> read;
public Set<String> write;
}
@XmlRootElement
public static class PDRIDesc {
public String name;
public String password;
public String resourceUrl;
public String username;
}
@XmlRootElement
public static class ReservationInfo {
@XmlElement(name = "communicationID")
private String communicationID;
@XmlElement(name = "storageHost")
private String storageHost;
@XmlElement(name = "storageHostIndex")
private int storageHostIndex;
@XmlElement(name = "workerDataAccessURL")
private String workerDataAccessURL;
}
@XmlRootElement
public static class WorkerStatus {
@XmlElement(name = "hostName")
private String hostName;
@XmlElement(name = "status")
private String status;
}
@XmlRootElement
public static class Stats {
@XmlElement(name = "source")
String source;
@XmlElement(name = "destination")
String destination;
@XmlElement(name = "size")
Long size;
@XmlElement(name = "speed")
Double speed;
}
}
| Fixed folder names for TestREST | lobcder-tests/src/test/java/nl/uva/cs/lobcder/tests/TestREST.java | Fixed folder names for TestREST |
|
Java | apache-2.0 | b30fd8a9f90e79f1f66c30eb75cda86c607fa77e | 0 | Cokemonkey11/WurstScript,Cokemonkey11/WurstScript,wurstscript/WurstScript,Cokemonkey11/WurstScript,Crigges/WurstScript,peq/WurstScript,Cokemonkey11/WurstScript,Crigges/WurstScript,peq/WurstScript,Crigges/WurstScript,Crigges/WurstScript,Crigges/WurstScript,peq/WurstScript,Crigges/WurstScript,wurstscript/WurstScript,Crigges/WurstScript,wurstscript/WurstScript,peq/WurstScript,Crigges/WurstScript,Cokemonkey11/WurstScript | package de.peeeq.wurstscript.gui;
import de.peeeq.wurstscript.attributes.CompileError;
/**
* implementation for use with cli interfaces
*/
public class WurstGuiCliImpl extends WurstGui {
@Override
public void sendError(CompileError err) {
super.sendError(err);
}
@Override
public void sendProgress(String msg, double percent) {
}
@Override
public void sendFinished() {
System.out.println("done");
}
@Override
public void showInfoMessage(String message) {
System.out.println(message);
}
}
| de.peeeq.wurstscript/src/de/peeeq/wurstscript/gui/WurstGuiCliImpl.java | package de.peeeq.wurstscript.gui;
import de.peeeq.wurstscript.attributes.CompileError;
/**
* implementation for use with cli interfaces
*/
public class WurstGuiCliImpl extends WurstGui {
@Override
public void sendError(CompileError err) {
super.sendError(err);
System.out.println(err);
}
@Override
public void sendProgress(String msg, double percent) {
}
@Override
public void sendFinished() {
System.out.println("done");
}
@Override
public void showInfoMessage(String message) {
System.out.println(message);
}
}
| removed println for warnings
| de.peeeq.wurstscript/src/de/peeeq/wurstscript/gui/WurstGuiCliImpl.java | removed println for warnings |
|
Java | apache-2.0 | 7575bb73ab46b2fcdc2cbba11625c55fe57254fe | 0 | SachinMali/acs-aem-commons,bstopp/acs-aem-commons,dfoerderreuther/acs-aem-commons,SachinMali/acs-aem-commons,dfoerderreuther/acs-aem-commons,dfoerderreuther/acs-aem-commons,SachinMali/acs-aem-commons,dfoerderreuther/acs-aem-commons,Adobe-Consulting-Services/acs-aem-commons,bstopp/acs-aem-commons,bstopp/acs-aem-commons,Adobe-Consulting-Services/acs-aem-commons,bstopp/acs-aem-commons,Adobe-Consulting-Services/acs-aem-commons,Adobe-Consulting-Services/acs-aem-commons,SachinMali/acs-aem-commons | /*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2015 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.adobe.acs.commons.analysis.jcrchecksum.impl.servlets;
import com.adobe.acs.commons.analysis.jcrchecksum.ChecksumGeneratorOptions;
import com.adobe.acs.commons.analysis.jcrchecksum.impl.JSONGenerator;
import com.adobe.acs.commons.analysis.jcrchecksum.impl.options.ChecksumGeneratorOptionsFactory;
import com.adobe.acs.commons.analysis.jcrchecksum.impl.options.RequestChecksumGeneratorOptions;
import com.google.gson.stream.JsonWriter;
import org.apache.commons.collections.CollectionUtils;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Set;
@SuppressWarnings("serial")
@Component
@Properties({
@Property(
name="sling.servlet.paths",
value= JSONDumpServlet.SERVLET_PATH
),
@Property(
name="sling.auth.requirements",
value= "-" + JSONDumpServlet.SERVLET_PATH
)
})
@Service
public class JSONDumpServlet extends BaseChecksumServlet {
private static final Logger log = LoggerFactory.getLogger(JSONDumpServlet.class);
public static final String SERVLET_PATH = ServletConstants.SERVLET_PATH + "."
+ ServletConstants.JSON_SERVLET_SELECTOR + "."
+ ServletConstants.JSON_SERVLET_EXTENSION;
@Override
public final void doGet(SlingHttpServletRequest request, SlingHttpServletResponse response) throws
ServletException, IOException {
try {
this.handleCORS(request, response);
this.handleRequest(request, response);
} catch (RepositoryException e) {
throw new ServletException(e);
}
}
public final void doPost(SlingHttpServletRequest request, SlingHttpServletResponse response) throws
ServletException, IOException {
try {
this.handleCORS(request, response);
this.handleRequest(request, response);
} catch (RepositoryException e) {
throw new ServletException(e);
}
}
private void handleRequest(SlingHttpServletRequest request, SlingHttpServletResponse response)
throws IOException,
RepositoryException, ServletException {
response.setContentType("application/json");
response.setCharacterEncoding("UTF-8");
// Generate current date and time for filename
DateFormat df = new SimpleDateFormat("yyyyddMM_HHmmss");
Date today = Calendar.getInstance().getTime();
String filename = df.format(today);
response.setHeader("Content-Disposition", "filename=jcr-checksum-"
+ filename + ".json");
String optionsName = request.getParameter(ServletConstants.OPTIONS_NAME);
ChecksumGeneratorOptions options =
ChecksumGeneratorOptionsFactory.getOptions(request, optionsName);
if (log.isDebugEnabled()) {
log.debug(options.toString());
}
Set<String> paths = RequestChecksumGeneratorOptions.getPaths(request);
if (CollectionUtils.isEmpty(paths)) {
try {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
response.getWriter().print(
"ERROR: At least one path must be specified");
} catch (IOException ioe) {
throw ioe;
}
} else {
Session session = request.getResourceResolver().adaptTo(Session.class);
JsonWriter jsonWriter = new JsonWriter(response.getWriter());
try {
JSONGenerator.generateJSON(session, paths, options, jsonWriter);
jsonWriter.close();
} catch (RepositoryException e) {
throw new ServletException("Error accessing repository", e);
} catch (IOException e) {
throw new ServletException("Unable to generate json", e);
}
}
}
} | bundle/src/main/java/com/adobe/acs/commons/analysis/jcrchecksum/impl/servlets/JSONDumpServlet.java | /*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2015 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.adobe.acs.commons.analysis.jcrchecksum.impl.servlets;
import com.adobe.acs.commons.analysis.jcrchecksum.ChecksumGeneratorOptions;
import com.adobe.acs.commons.analysis.jcrchecksum.impl.JSONGenerator;
import com.adobe.acs.commons.analysis.jcrchecksum.impl.options.ChecksumGeneratorOptionsFactory;
import com.adobe.acs.commons.analysis.jcrchecksum.impl.options.RequestChecksumGeneratorOptions;
import com.google.gson.stream.JsonWriter;
import org.apache.commons.collections.CollectionUtils;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Set;
@SuppressWarnings("serial")
@Component
@Properties({
@Property(
name="sling.servlet.paths",
value= JSONDumpServlet.SERVLET_PATH
),
@Property(
name="sling.auth.requirements",
value= "-" + JSONDumpServlet.SERVLET_PATH
)
})
@Service
public class JSONDumpServlet extends BaseChecksumServlet {
private static final Logger log = LoggerFactory.getLogger(JSONDumpServlet.class);
public static final String SERVLET_PATH = ServletConstants.SERVLET_PATH + "."
+ ServletConstants.JSON_SERVLET_SELECTOR + "."
+ ServletConstants.JSON_SERVLET_EXTENSION;
@Override
public final void doGet(SlingHttpServletRequest request, SlingHttpServletResponse response) throws
ServletException, IOException {
try {
this.handleCORS(request, response);
this.handleRequest(request, response);
} catch (RepositoryException e) {
throw new ServletException(e);
}
}
public final void doPost(SlingHttpServletRequest request, SlingHttpServletResponse response) throws
ServletException, IOException {
try {
this.handleCORS(request, response);
this.handleRequest(request, response);
} catch (RepositoryException e) {
throw new ServletException(e);
}
}
private void handleRequest(SlingHttpServletRequest request, SlingHttpServletResponse response)
throws IOException,
RepositoryException, ServletException {
response.setContentType("application/json");
response.setCharacterEncoding("UTF-8");
// Generate current date and time for filename
DateFormat df = new SimpleDateFormat("yyyyddMM_HHmmss");
Date today = Calendar.getInstance().getTime();
String filename = df.format(today);
response.setHeader("Content-Disposition", "filename=jcr-checksum-"
+ filename + ".json");
String optionsName = request.getParameter(ServletConstants.OPTIONS_NAME);
ChecksumGeneratorOptions options =
ChecksumGeneratorOptionsFactory.getOptions(request, optionsName);
if (log.isDebugEnabled()) {
log.debug(options.toString());
}
Set<String> paths = RequestChecksumGeneratorOptions.getPaths(request);
if (CollectionUtils.isEmpty(paths)) {
try {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
response.getWriter().print(
"ERROR: At least one path must be specified");
} catch (IOException ioe) {
throw ioe;
}
}
Session session = request.getResourceResolver().adaptTo(Session.class);
JsonWriter jsonWriter = new JsonWriter(response.getWriter());
try {
JSONGenerator.generateJSON(session, paths, options, jsonWriter);
} catch (RepositoryException e) {
throw new ServletException("Error accessing repository", e);
} catch (IOException e) {
throw new ServletException("Unable to generate json", e);
}
}
} | Closing the JsonWriter from the previous commit.
| bundle/src/main/java/com/adobe/acs/commons/analysis/jcrchecksum/impl/servlets/JSONDumpServlet.java | Closing the JsonWriter from the previous commit. |
|
Java | apache-2.0 | 9f6f8af5fef90ec52bd05ce2bbca2931f9057559 | 0 | dgrove727/autopsy,millmanorama/autopsy,rcordovano/autopsy,esaunders/autopsy,APriestman/autopsy,rcordovano/autopsy,APriestman/autopsy,millmanorama/autopsy,millmanorama/autopsy,APriestman/autopsy,rcordovano/autopsy,wschaeferB/autopsy,narfindustries/autopsy,APriestman/autopsy,esaunders/autopsy,rcordovano/autopsy,esaunders/autopsy,dgrove727/autopsy,esaunders/autopsy,wschaeferB/autopsy,narfindustries/autopsy,APriestman/autopsy,APriestman/autopsy,wschaeferB/autopsy,narfindustries/autopsy,millmanorama/autopsy,wschaeferB/autopsy,wschaeferB/autopsy,APriestman/autopsy,esaunders/autopsy,rcordovano/autopsy,rcordovano/autopsy,dgrove727/autopsy | /*
* Autopsy Forensic Browser
*
* Copyright 2011 - 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.hashdatabase;
import java.beans.PropertyChangeEvent;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.swing.JFileChooser;
import javax.swing.filechooser.FileNameExtensionFilter;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.Serializable;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javax.persistence.PersistenceException;
import javax.swing.JOptionPane;
import javax.swing.SwingWorker;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.FileUtils;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.util.io.NbObjectInputStream;
import org.openide.util.io.NbObjectOutputStream;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashEntry;
import org.sleuthkit.datamodel.SleuthkitJNI;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.IngestManager;
/**
* This class implements a singleton that manages the set of hash databases used
* to classify files as unknown, known or known bad.
*/
public class HashDbManager implements PropertyChangeListener {
private static final String ROOT_ELEMENT = "hash_sets"; //NON-NLS
private static final String SET_ELEMENT = "hash_set"; //NON-NLS
private static final String SET_NAME_ATTRIBUTE = "name"; //NON-NLS
private static final String SET_TYPE_ATTRIBUTE = "type"; //NON-NLS
private static final String SEARCH_DURING_INGEST_ATTRIBUTE = "use_for_ingest"; //NON-NLS
private static final String SEND_INGEST_MESSAGES_ATTRIBUTE = "show_inbox_messages"; //NON-NLS
private static final String PATH_ELEMENT = "hash_set_path"; //NON-NLS
private static final String LEGACY_PATH_NUMBER_ATTRIBUTE = "number"; //NON-NLS
private static final String CONFIG_FILE_NAME = "hashsets.xml"; //NON-NLS
private static final String DB_SERIALIZATION_FILE_NAME = "hashDbs.settings";
private static final String XSD_FILE_NAME = "HashsetsSchema.xsd"; //NON-NLS
private static final String ENCODING = "UTF-8"; //NON-NLS
private static final String HASH_DATABASE_FILE_EXTENSON = "kdb"; //NON-NLS
private static HashDbManager instance = null;
private final String configFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + CONFIG_FILE_NAME;
private final String DB_SERIALIZATION_FILE_PATH = PlatformUtil.getUserConfigDirectory() + File.separator + DB_SERIALIZATION_FILE_NAME;
private List<HashDb> knownHashSets = new ArrayList<>();
private List<HashDb> knownBadHashSets = new ArrayList<>();
private Set<String> hashSetNames = new HashSet<>();
private Set<String> hashSetPaths = new HashSet<>();
PropertyChangeSupport changeSupport = new PropertyChangeSupport(HashDbManager.class);
private static final Logger logger = Logger.getLogger(HashDbManager.class.getName());
/**
* Property change event support In events: For both of these enums, the old
* value should be null, and the new value should be the hashset name
* string.
*/
public enum SetEvt {
DB_ADDED, DB_DELETED, DB_INDEXED
};
/**
* Gets the singleton instance of this class.
*/
public static synchronized HashDbManager getInstance() {
if (instance == null) {
instance = new HashDbManager();
}
return instance;
}
public synchronized void addPropertyChangeListener(PropertyChangeListener listener) {
changeSupport.addPropertyChangeListener(listener);
}
private HashDbManager() {
readHashSetsConfigurationFromDisk();
}
/**
* Gets the extension, without the dot separator, that the SleuthKit
* requires for the hash database files that combine a database and an index
* and can therefore be updated.
*/
static String getHashDatabaseFileExtension() {
return HASH_DATABASE_FILE_EXTENSON;
}
public class HashDbManagerException extends Exception {
private HashDbManagerException(String message) {
super(message);
}
}
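// Illustrative usage sketch (the hash set name and file path below are hypothetical):
//   HashDb db = HashDbManager.getInstance().addExistingHashDatabase(
//           "NSRL", "C:\\hashsets\\NSRLFile.kdb", true, false, HashDb.KnownFilesType.KNOWN);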
/**
* Adds an existing hash database to the set of hash databases used to
* classify files as known or known bad and saves the configuration.
*
* @param hashSetName Name used to represent the hash database in
* user interface components.
* @param path Full path to either a hash database file or a
* hash database index file.
* @param searchDuringIngest A flag indicating whether or not the hash
* database should be searched during ingest.
* @param sendIngestMessages A flag indicating whether hash set hit messages
* should be sent as ingest messages.
* @param knownFilesType The classification to apply to files whose
* hashes are found in the hash database.
*
* @return A HashDb representing the hash database.
*
* @throws HashDbManagerException
*/
public HashDb addExistingHashDatabase(String hashSetName, String path, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws HashDbManagerException {
HashDb hashDb = null;
try {
hashDb = addExistingHashDatabaseInternal(hashSetName, path, searchDuringIngest, sendIngestMessages, knownFilesType);
} catch (TskCoreException ex) {
throw new HashDbManagerException(ex.getMessage());
}
// Save the configuration
if (!save()) {
throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.saveErrorExceptionMsg"));
}
return hashDb;
}
/**
* Adds an existing hash database to the set of hash databases used to
* classify files as known or known bad. Does not save the configuration -
* the configuration is only saved on demand to support cancellation of
* configuration panels.
*
* @param hashSetName Name used to represent the hash database in
* user interface components.
* @param path Full path to either a hash database file or a
* hash database index file.
* @param searchDuringIngest A flag indicating whether or not the hash
* database should be searched during ingest.
* @param sendIngestMessages A flag indicating whether hash set hit messages
* should be sent as ingest messages.
* @param knownFilesType The classification to apply to files whose
* hashes are found in the hash database.
*
* @return A HashDb representing the hash database.
*
* @throws HashDbManagerException, TskCoreException
*/
synchronized HashDb addExistingHashDatabaseInternal(String hashSetName, String path, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws HashDbManagerException, TskCoreException {
if (!new File(path).exists()) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.hashDbDoesNotExistExceptionMsg", path));
}
if (hashSetPaths.contains(path)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.hashDbAlreadyAddedExceptionMsg", path));
}
if (hashSetNames.contains(hashSetName)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.duplicateHashSetNameExceptionMsg", hashSetName));
}
return addHashDatabase(SleuthkitJNI.openHashDatabase(path), hashSetName, searchDuringIngest, sendIngestMessages, knownFilesType);
}
/**
* Adds a new hash database to the set of hash databases used to classify
* files as known or known bad and saves the configuration.
*
* @param hashSetName Hash set name used to represent the hash
* database in user interface components.
* @param path Full path to the database file to be created.
* @param searchDuringIngest A flag indicating whether or not the hash
* database should be searched during ingest.
* @param sendIngestMessages A flag indicating whether hash set hit messages
* should be sent as ingest messages.
* @param knownFilesType The classification to apply to files whose
* hashes are found in the hash database.
*
* @return A HashDb representing the hash database.
*
* @throws HashDbManagerException
*/
public HashDb addNewHashDatabase(String hashSetName, String path, boolean searchDuringIngest, boolean sendIngestMessages,
HashDb.KnownFilesType knownFilesType) throws HashDbManagerException {
HashDb hashDb = null;
try {
hashDb = addNewHashDatabaseInternal(hashSetName, path, searchDuringIngest, sendIngestMessages, knownFilesType);
} catch (TskCoreException ex) {
throw new HashDbManagerException(ex.getMessage());
}
// Save the configuration
if (!save()) {
throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.saveErrorExceptionMsg"));
}
return hashDb;
}
/**
* Adds a new hash database to the set of hash databases used to classify
* files as known or known bad. Does not save the configuration - the
* configuration is only saved on demand to support cancellation of
* configuration panels.
*
* @param hashSetName Hash set name used to represent the hash
* database in user interface components.
* @param path Full path to the database file to be created.
* @param searchDuringIngest A flag indicating whether or not the hash
* database should be searched during ingest.
* @param sendIngestMessages A flag indicating whether hash set hit messages
* should be sent as ingest messages.
* @param knownFilesType The classification to apply to files whose
* hashes are found in the hash database.
*
* @return A HashDb representing the hash database.
*
* @throws HashDbManagerException, TskCoreException
*/
synchronized HashDb addNewHashDatabaseInternal(String hashSetName, String path, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws HashDbManagerException, TskCoreException {
File file = new File(path);
if (file.exists()) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.hashDbFileExistsExceptionMsg", path));
}
if (!FilenameUtils.getExtension(file.getName()).equalsIgnoreCase(HASH_DATABASE_FILE_EXTENSON)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.illegalHashDbFileNameExtensionMsg",
getHashDatabaseFileExtension()));
}
if (hashSetPaths.contains(path)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.hashDbAlreadyAddedExceptionMsg", path));
}
if (hashSetNames.contains(hashSetName)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.duplicateHashSetNameExceptionMsg", hashSetName));
}
return addHashDatabase(SleuthkitJNI.createHashDatabase(path), hashSetName, searchDuringIngest, sendIngestMessages, knownFilesType);
}
private HashDb addHashDatabase(int handle, String hashSetName, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws TskCoreException {
// Wrap an object around the handle.
HashDb hashDb = new HashDb(handle, hashSetName, searchDuringIngest, sendIngestMessages, knownFilesType);
// Get the identity data before updating the collections since the
// accessor methods may throw.
String databasePath = hashDb.getDatabasePath();
String indexPath = hashDb.getIndexPath();
// Update the collections used to ensure that hash set names are unique
// and the same database is not added to the configuration more than once.
hashSetNames.add(hashDb.getHashSetName());
if (!databasePath.equals("None")) { //NON-NLS
hashSetPaths.add(databasePath);
}
if (!indexPath.equals("None")) { //NON-NLS
hashSetPaths.add(indexPath);
}
// Add the hash database to the appropriate collection for its type.
if (hashDb.getKnownFilesType() == HashDb.KnownFilesType.KNOWN) {
knownHashSets.add(hashDb);
} else {
knownBadHashSets.add(hashDb);
}
// Let any external listeners know that there's a new set
try {
changeSupport.firePropertyChange(SetEvt.DB_ADDED.toString(), null, hashSetName);
} catch (Exception e) {
logger.log(Level.SEVERE, "HashDbManager listener threw exception", e); //NON-NLS
MessageNotifyUtil.Notify.show(
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErr"),
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErrorListeningToUpdatesMsg"),
MessageNotifyUtil.MessageType.ERROR);
}
return hashDb;
}
synchronized void indexHashDatabase(HashDb hashDb) {
hashDb.addPropertyChangeListener(this);
HashDbIndexer creator = new HashDbIndexer(hashDb);
creator.execute();
}
@Override
public void propertyChange(PropertyChangeEvent event) {
if (event.getPropertyName().equals(HashDb.Event.INDEXING_DONE.name())) {
HashDb hashDb = (HashDb) event.getNewValue();
if (null != hashDb) {
try {
String indexPath = hashDb.getIndexPath();
if (!indexPath.equals("None")) { //NON-NLS
hashSetPaths.add(indexPath);
}
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDb.getHashSetName() + " hash database after indexing", ex); //NON-NLS
}
}
}
}
/**
* Removes a hash database from the set of hash databases used to classify
* files as known or known bad and saves the configuration.
*
* @param hashDb
*
* @throws HashDbManagerException
*/
public synchronized void removeHashDatabase(HashDb hashDb) throws HashDbManagerException {
// Don't remove a database if ingest is running
boolean ingestIsRunning = IngestManager.getInstance().isIngestRunning();
if (ingestIsRunning) {
throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.ingestRunningExceptionMsg"));
}
removeHashDatabaseInternal(hashDb);
if (!save()) {
throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.saveErrorExceptionMsg"));
}
}
/**
* Removes a hash database from the set of hash databases used to classify
* files as known or known bad. Does not save the configuration - the
* configuration is only saved on demand to support cancellation of
* configuration panels.
*
* @throws TskCoreException
*/
synchronized void removeHashDatabaseInternal(HashDb hashDb) {
// Remove the database from whichever hash set list it occupies,
// and remove its hash set name from the set used to ensure unique
// hash set names, before undertaking the operations that could throw.
// These operations will succeed and constitute a mostly effective
// removal, even if the subsequent operations fail.
String hashSetName = hashDb.getHashSetName();
knownHashSets.remove(hashDb);
knownBadHashSets.remove(hashDb);
hashSetNames.remove(hashSetName);
// Now undertake the operations that could throw.
try {
hashSetPaths.remove(hashDb.getIndexPath());
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDb.getHashSetName() + " hash database when removing the database", ex); //NON-NLS
}
try {
if (!hashDb.hasIndexOnly()) {
hashSetPaths.remove(hashDb.getDatabasePath());
}
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting database path of " + hashDb.getHashSetName() + " hash database when removing the database", ex); //NON-NLS
}
try {
hashDb.close();
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + hashDb.getHashSetName() + " hash database when removing the database", ex); //NON-NLS
}
// Let any external listeners know that a set has been deleted
try {
changeSupport.firePropertyChange(SetEvt.DB_DELETED.toString(), null, hashSetName);
} catch (Exception e) {
logger.log(Level.SEVERE, "HashDbManager listener threw exception", e); //NON-NLS
MessageNotifyUtil.Notify.show(
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErr"),
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErrorListeningToUpdatesMsg"),
MessageNotifyUtil.MessageType.ERROR);
}
}
/**
* Gets all of the hash databases used to classify files as known or known
* bad.
*
* @return A list, possibly empty, of hash databases.
*/
public synchronized List<HashDb> getAllHashSets() {
List<HashDb> hashDbs = new ArrayList<>();
hashDbs.addAll(knownHashSets);
hashDbs.addAll(knownBadHashSets);
return hashDbs;
}
/**
* Gets all of the hash databases used to classify files as known.
*
* @return A list, possibly empty, of hash databases.
*/
public synchronized List<HashDb> getKnownFileHashSets() {
List<HashDb> hashDbs = new ArrayList<>();
hashDbs.addAll(knownHashSets);
return hashDbs;
}
/**
* Gets all of the hash databases used to classify files as known bad.
*
* @return A list, possibly empty, of hash databases.
*/
public synchronized List<HashDb> getKnownBadFileHashSets() {
List<HashDb> hashDbs = new ArrayList<>();
hashDbs.addAll(knownBadHashSets);
return hashDbs;
}
/**
* Gets all of the hash databases that accept updates.
*
* @return A list, possibly empty, of hash databases.
*/
public synchronized List<HashDb> getUpdateableHashSets() {
List<HashDb> updateableDbs = getUpdateableHashSets(knownHashSets);
updateableDbs.addAll(getUpdateableHashSets(knownBadHashSets));
return updateableDbs;
}
private List<HashDb> getUpdateableHashSets(List<HashDb> hashDbs) {
ArrayList<HashDb> updateableDbs = new ArrayList<>();
for (HashDb db : hashDbs) {
try {
if (db.isUpdateable()) {
updateableDbs.add(db);
}
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error checking updateable status of " + db.getHashSetName() + " hash database", ex); //NON-NLS
}
}
return updateableDbs;
}
/**
* Saves the hash sets configuration. Note that the configuration is only
* saved on demand to support cancellation of configuration panels.
*
* @return True on success, false otherwise.
*/
synchronized boolean save() {
return writeHashSetConfigurationToDisk();
}
/**
* Restores the last saved hash sets configuration. This supports
* cancellation of configuration panels.
*/
public synchronized void loadLastSavedConfiguration() {
closeHashDatabases(knownHashSets);
closeHashDatabases(knownBadHashSets);
hashSetNames.clear();
hashSetPaths.clear();
readHashSetsConfigurationFromDisk();
}
private void closeHashDatabases(List<HashDb> hashDatabases) {
for (HashDb database : hashDatabases) {
try {
database.close();
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + database.getHashSetName() + " hash database", ex); //NON-NLS
}
}
hashDatabases.clear();
}
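// Serializes the current hash set configuration to the settings file and removes the legacy
// XML configuration file if one is present.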
private boolean writeHashSetConfigurationToDisk() {
HashDbSerializationSettings settings = new HashDbSerializationSettings(this.knownHashSets, this.knownBadHashSets);
try (NbObjectOutputStream out = new NbObjectOutputStream(new FileOutputStream(DB_SERIALIZATION_FILE_PATH))) {
out.writeObject(settings);
File xmlFile = new File(configFilePath);
if (xmlFile.exists()) {
xmlFile.delete();
}
return true;
} catch (IOException ex) {
throw new PersistenceException(String.format("Failed to write settings to %s", DB_SERIALIZATION_FILE_PATH), ex);
}
}
private static void writeHashDbsToDisk(Document doc, Element rootEl, List<HashDb> hashDbs) {
for (HashDb db : hashDbs) {
// Get the path for the hash database before writing anything, in
// case an exception is thrown.
String path;
try {
if (db.hasIndexOnly()) {
path = db.getIndexPath();
} else {
path = db.getDatabasePath();
}
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting path of hash database " + db.getHashSetName() + ", discarding from hash database configuration", ex); //NON-NLS
continue;
}
Element setElement = doc.createElement(SET_ELEMENT);
setElement.setAttribute(SET_NAME_ATTRIBUTE, db.getHashSetName());
setElement.setAttribute(SET_TYPE_ATTRIBUTE, db.getKnownFilesType().toString());
setElement.setAttribute(SEARCH_DURING_INGEST_ATTRIBUTE, Boolean.toString(db.getSearchDuringIngest()));
setElement.setAttribute(SEND_INGEST_MESSAGES_ATTRIBUTE, Boolean.toString(db.getSendIngestMessages()));
Element pathElement = doc.createElement(PATH_ELEMENT);
pathElement.setTextContent(path);
setElement.appendChild(pathElement);
rootEl.appendChild(setElement);
}
}
private boolean hashSetsConfigurationFileExists() {
File f = new File(configFilePath);
return f.exists() && f.canRead() && f.canWrite();
}
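// Loads the hash set configuration: if the legacy XML file exists it is parsed (and migrated,
// with a backup, when an old schema is detected); otherwise the serialized settings file is used.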
private boolean readHashSetsConfigurationFromDisk() {
if (hashSetsConfigurationFileExists()) {
boolean updatedSchema = false;
// Open the XML document that implements the configuration file.
final Document doc = XMLUtil.loadDoc(HashDbManager.class, configFilePath);
if (doc == null) {
return false;
}
// Get the root element.
Element root = doc.getDocumentElement();
if (root == null) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error loading hash sets: invalid file format."); //NON-NLS
return false;
}
// Get the hash set elements.
NodeList setsNList = root.getElementsByTagName(SET_ELEMENT);
int numSets = setsNList.getLength();
if (numSets == 0) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.WARNING, "No element hash_set exists."); //NON-NLS
}
// Create HashDb objects for each hash set element. Skip to the next hash database if the definition of
// a particular hash database is not well-formed.
String attributeErrorMessage = " attribute was not set for hash_set at index {0}, cannot make instance of HashDb class"; //NON-NLS
String elementErrorMessage = " element was not set for hash_set at index {0}, cannot make instance of HashDb class"; //NON-NLS
for (int i = 0; i < numSets; ++i) {
Element setEl = (Element) setsNList.item(i);
String hashSetName = setEl.getAttribute(SET_NAME_ATTRIBUTE);
if (hashSetName.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, SET_NAME_ATTRIBUTE + attributeErrorMessage, i);
continue;
}
// Handle configurations saved before duplicate hash set names were not permitted.
if (hashSetNames.contains(hashSetName)) {
int suffix = 0;
String newHashSetName;
do {
++suffix;
newHashSetName = hashSetName + suffix;
} while (hashSetNames.contains(newHashSetName));
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.replacingDuplicateHashsetNameMsg",
hashSetName, newHashSetName),
NbBundle.getMessage(this.getClass(), "HashDbManager.openHashDbErr"),
JOptionPane.ERROR_MESSAGE);
hashSetName = newHashSetName;
}
String knownFilesType = setEl.getAttribute(SET_TYPE_ATTRIBUTE);
if (knownFilesType.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, SET_TYPE_ATTRIBUTE + attributeErrorMessage, i);
continue;
}
// Handle legacy known files types.
if (knownFilesType.equals("NSRL")) { //NON-NLS
knownFilesType = HashDb.KnownFilesType.KNOWN.toString();
updatedSchema = true;
}
final String searchDuringIngest = setEl.getAttribute(SEARCH_DURING_INGEST_ATTRIBUTE);
if (searchDuringIngest.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, SEARCH_DURING_INGEST_ATTRIBUTE + attributeErrorMessage, i);
continue;
}
Boolean searchDuringIngestFlag = Boolean.parseBoolean(searchDuringIngest);
final String sendIngestMessages = setEl.getAttribute(SEND_INGEST_MESSAGES_ATTRIBUTE);
if (sendIngestMessages.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, SEND_INGEST_MESSAGES_ATTRIBUTE + attributeErrorMessage, i);
continue;
}
Boolean sendIngestMessagesFlag = Boolean.parseBoolean(sendIngestMessages);
String dbPath;
NodeList pathsNList = setEl.getElementsByTagName(PATH_ELEMENT);
if (pathsNList.getLength() > 0) {
Element pathEl = (Element) pathsNList.item(0); // Shouldn't be more than one.
// Check for legacy path number attribute.
String legacyPathNumber = pathEl.getAttribute(LEGACY_PATH_NUMBER_ATTRIBUTE);
if (null != legacyPathNumber && !legacyPathNumber.isEmpty()) {
updatedSchema = true;
}
dbPath = pathEl.getTextContent();
if (dbPath.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, PATH_ELEMENT + elementErrorMessage, i);
continue;
}
} else {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, PATH_ELEMENT + elementErrorMessage, i);
continue;
}
dbPath = getValidFilePath(hashSetName, dbPath);
if (null != dbPath) {
try {
addExistingHashDatabaseInternal(hashSetName, dbPath, searchDuringIngestFlag, sendIngestMessagesFlag, HashDb.KnownFilesType.valueOf(knownFilesType));
} catch (HashDbManagerException | TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash database", ex); //NON-NLS
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.unableToOpenHashDbMsg", dbPath),
NbBundle.getMessage(this.getClass(), "HashDbManager.openHashDbErr"),
JOptionPane.ERROR_MESSAGE);
}
} else {
Logger.getLogger(HashDbManager.class.getName()).log(Level.WARNING, "No valid path for hash_set at index {0}, cannot make instance of HashDb class", i); //NON-NLS
}
}
if (updatedSchema) {
String backupFilePath = configFilePath + ".v1_backup"; //NON-NLS
String messageBoxTitle = NbBundle.getMessage(this.getClass(),
"HashDbManager.msgBoxTitle.confFileFmtChanged");
String baseMessage = NbBundle.getMessage(this.getClass(),
"HashDbManager.baseMessage.updatedFormatHashDbConfig");
try {
FileUtils.copyFile(new File(configFilePath), new File(backupFilePath));
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.savedBackupOfOldConfigMsg",
baseMessage, backupFilePath),
messageBoxTitle,
JOptionPane.INFORMATION_MESSAGE);
} catch (IOException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.WARNING, "Failed to save backup of old format configuration file to " + backupFilePath, ex); //NON-NLS
JOptionPane.showMessageDialog(null, baseMessage, messageBoxTitle, JOptionPane.INFORMATION_MESSAGE);
}
writeHashSetConfigurationToDisk();
}
return true;
} else {
File fileSetFile = new File(DB_SERIALIZATION_FILE_PATH);
if (fileSetFile.exists()) {
try {
try (NbObjectInputStream in = new NbObjectInputStream(new FileInputStream(DB_SERIALIZATION_FILE_PATH))) {
HashDbSerializationSettings filesSetsSettings = (HashDbSerializationSettings) in.readObject();
this.setFields(filesSetsSettings);
return true;
}
} catch (IOException | ClassNotFoundException ex) {
throw new PersistenceException(String.format("Failed to read settings from %s", DB_SERIALIZATION_FILE_PATH), ex);
}
} else {
this.setFields(new HashDbSerializationSettings(new ArrayList<>(), new ArrayList<>()));
return true;
}
}
}
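// Re-opens the hash databases described by the serialized settings, reporting any database
// that can no longer be opened.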
private void setFields(HashDbSerializationSettings settings) {
for (HashDbManager.HashDb hashDb : settings.getKnownHashSets()) {
try {
addExistingHashDatabaseInternal(hashDb.getHashSetName(), hashDb.getDatabasePath(), hashDb.getSearchDuringIngest(), hashDb.getSendIngestMessages(), HashDb.KnownFilesType.KNOWN);
} catch (HashDbManagerException | TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash database", ex); //NON-NLS
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.unableToOpenHashDbMsg", hashDb.getHashSetName()),
NbBundle.getMessage(this.getClass(), "HashDbManager.openHashDbErr"),
JOptionPane.ERROR_MESSAGE);
}
}
for (HashDbManager.HashDb hashDb : settings.getKnownBadHashSets()) {
try {
addExistingHashDatabaseInternal(hashDb.getHashSetName(), hashDb.getDatabasePath(), hashDb.getSearchDuringIngest(), hashDb.getSendIngestMessages(), HashDb.KnownFilesType.KNOWN_BAD);
} catch (HashDbManagerException | TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash database", ex); //NON-NLS
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.unableToOpenHashDbMsg", hashDb.getHashSetName()),
NbBundle.getMessage(this.getClass(), "HashDbManager.openHashDbErr"),
JOptionPane.ERROR_MESSAGE);
}
}
}
private String getValidFilePath(String hashSetName, String configuredPath) {
// Check the configured path.
File database = new File(configuredPath);
if (database.exists()) {
return configuredPath;
}
// Give the user an opportunity to find the desired file.
String newPath = null;
if (JOptionPane.showConfirmDialog(null,
NbBundle.getMessage(this.getClass(), "HashDbManager.dlgMsg.dbNotFoundAtLoc",
hashSetName, configuredPath),
NbBundle.getMessage(this.getClass(), "HashDbManager.dlgTitle.MissingDb"),
JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) {
newPath = searchForFile();
if (null != newPath && !newPath.isEmpty()) {
database = new File(newPath);
if (!database.exists()) {
newPath = null;
}
}
}
return newPath;
}
private String searchForFile() {
String filePath = null;
JFileChooser fc = new JFileChooser();
fc.setDragEnabled(false);
fc.setFileSelectionMode(JFileChooser.FILES_ONLY);
String[] EXTENSION = new String[]{"txt", "idx", "hash", "Hash", "kdb"}; //NON-NLS
FileNameExtensionFilter filter = new FileNameExtensionFilter(
NbBundle.getMessage(this.getClass(), "HashDbManager.fileNameExtensionFilter.title"), EXTENSION);
fc.setFileFilter(filter);
fc.setMultiSelectionEnabled(false);
if (fc.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
File f = fc.getSelectedFile();
try {
filePath = f.getCanonicalPath();
} catch (IOException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.WARNING, "Couldn't get selected file path", ex); //NON-NLS
}
}
return filePath;
}
/**
* Instances of this class represent hash databases used to classify files
* as known or know bad.
*/
public static class HashDb implements Serializable {
/**
* Indicates how files with hashes stored in a particular hash database
* object should be classified.
*/
public enum KnownFilesType {
KNOWN(NbBundle.getMessage(HashDbManager.class, "HashDbManager.known.text")),
KNOWN_BAD(NbBundle.getMessage(HashDbManager.class, "HashDbManager.knownBad.text"));
private String displayName;
private KnownFilesType(String displayName) {
this.displayName = displayName;
}
public String getDisplayName() {
return this.displayName;
}
}
/**
* Property change events published by hash database objects.
*/
public enum Event {
INDEXING_DONE
}
private static final long serialVersionUID = 1L;
private int handle;
private String hashSetName;
private boolean searchDuringIngest;
private boolean sendIngestMessages;
private KnownFilesType knownFilesType;
private boolean indexing;
private final PropertyChangeSupport propertyChangeSupport = new PropertyChangeSupport(this);
private HashDb(int handle, String hashSetName, boolean useForIngest, boolean sendHitMessages, KnownFilesType knownFilesType) {
this.handle = handle;
this.hashSetName = hashSetName;
this.searchDuringIngest = useForIngest;
this.sendIngestMessages = sendHitMessages;
this.knownFilesType = knownFilesType;
this.indexing = false;
}
/**
* Adds a listener for the events defined in HashDb.Event.
*/
public void addPropertyChangeListener(PropertyChangeListener pcl) {
propertyChangeSupport.addPropertyChangeListener(pcl);
}
/**
* Removes a listener for the events defined in HashDb.Event.
*/
public void removePropertyChangeListener(PropertyChangeListener pcl) {
propertyChangeSupport.removePropertyChangeListener(pcl);
}
public String getHashSetName() {
return hashSetName;
}
public String getDatabasePath() throws TskCoreException {
return SleuthkitJNI.getHashDatabasePath(handle);
}
public String getIndexPath() throws TskCoreException {
return SleuthkitJNI.getHashDatabaseIndexPath(handle);
}
public KnownFilesType getKnownFilesType() {
return knownFilesType;
}
public boolean getSearchDuringIngest() {
return searchDuringIngest;
}
void setSearchDuringIngest(boolean useForIngest) {
this.searchDuringIngest = useForIngest;
}
public boolean getSendIngestMessages() {
return sendIngestMessages;
}
void setSendIngestMessages(boolean showInboxMessages) {
this.sendIngestMessages = showInboxMessages;
}
/**
* Indicates whether the hash database accepts updates.
*
* @return True if the database accepts updates, false otherwise.
*/
public boolean isUpdateable() throws TskCoreException {
return SleuthkitJNI.isUpdateableHashDatabase(this.handle);
}
/**
* Adds hashes of content (if calculated) to the hash database.
*
* @param content The content for which the calculated hashes, if any,
* are to be added to the hash database.
*
* @throws TskCoreException
*/
public void addHashes(Content content) throws TskCoreException {
addHashes(content, null);
}
/**
* Adds hashes of content (if calculated) to the hash database.
*
* @param content The content for which the calculated hashes, if any,
* are to be added to the hash database.
* @param comment A comment to associate with the hashes, e.g., the name
* of the case in which the content was encountered.
*
* @throws TskCoreException
*/
public void addHashes(Content content, String comment) throws TskCoreException {
// This only works for AbstractFiles and MD5 hashes at present.
assert content instanceof AbstractFile;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
if (null != file.getMd5Hash()) {
SleuthkitJNI.addToHashDatabase(null, file.getMd5Hash(), null, null, comment, handle);
}
}
}
/**
* Adds a list of hashes to the hash database at once
*
* @param hashes List of hashes
*
* @throws TskCoreException
*/
public void addHashes(List<HashEntry> hashes) throws TskCoreException {
SleuthkitJNI.addToHashDatabase(hashes, handle);
}
/**
* Perform a basic boolean lookup of the file's hash.
*
* @param content
*
* @return True if file's MD5 is in the hash database
*
* @throws TskCoreException
*/
public boolean lookupMD5Quick(Content content) throws TskCoreException {
boolean result = false;
assert content instanceof AbstractFile;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
if (null != file.getMd5Hash()) {
result = SleuthkitJNI.lookupInHashDatabase(file.getMd5Hash(), handle);
}
}
return result;
}
/**
* Lookup hash value in DB and provide details on file.
*
* @param content
*
* @return null if file is not in database.
*
* @throws TskCoreException
*/
public HashHitInfo lookupMD5(Content content) throws TskCoreException {
HashHitInfo result = null;
// This only works for AbstractFiles and MD5 hashes at present.
assert content instanceof AbstractFile;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
if (null != file.getMd5Hash()) {
result = SleuthkitJNI.lookupInHashDatabaseVerbose(file.getMd5Hash(), handle);
}
}
return result;
}
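/**
* Indicates whether the hash database has a lookup index.
*/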
boolean hasIndex() throws TskCoreException {
return SleuthkitJNI.hashDatabaseHasLookupIndex(handle);
}
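/**
* Indicates whether the hash database consists of an index only, with no associated database file.
*/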
boolean hasIndexOnly() throws TskCoreException {
return SleuthkitJNI.hashDatabaseIsIndexOnly(handle);
}
boolean canBeReIndexed() throws TskCoreException {
return SleuthkitJNI.hashDatabaseCanBeReindexed(handle);
}
boolean isIndexing() {
return indexing;
}
private void close() throws TskCoreException {
SleuthkitJNI.closeHashDatabase(handle);
}
}
/**
* Worker thread to make an index of a database
*/
private class HashDbIndexer extends SwingWorker<Object, Void> {
private ProgressHandle progress = null;
private HashDb hashDb = null;
HashDbIndexer(HashDb hashDb) {
this.hashDb = hashDb;
}
@Override
protected Object doInBackground() {
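// Create the lookup index for the database, showing an indeterminate progress handle while the SleuthKit does the work.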
hashDb.indexing = true;
progress = ProgressHandleFactory.createHandle(
NbBundle.getMessage(this.getClass(), "HashDbManager.progress.indexingHashSet", hashDb.hashSetName));
progress.start();
progress.switchToIndeterminate();
try {
SleuthkitJNI.createLookupIndexForHashDatabase(hashDb.handle);
} catch (TskCoreException ex) {
Logger.getLogger(HashDb.class.getName()).log(Level.SEVERE, "Error indexing hash database", ex); //NON-NLS
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.dlgMsg.errorIndexingHashSet",
hashDb.getHashSetName()),
NbBundle.getMessage(this.getClass(), "HashDbManager.hashDbIndexingErr"),
JOptionPane.ERROR_MESSAGE);
}
return null;
}
@Override
protected void done() {
hashDb.indexing = false;
progress.finish();
// see if we got any errors
try {
get();
} catch (InterruptedException | ExecutionException ex) {
logger.log(Level.SEVERE, "Error creating index", ex); //NON-NLS
MessageNotifyUtil.Notify.show(
NbBundle.getMessage(this.getClass(), "HashDbManager.errCreatingIndex.title"),
NbBundle.getMessage(this.getClass(), "HashDbManager.errCreatingIndex.msg", ex.getMessage()),
MessageNotifyUtil.MessageType.ERROR);
} // catch and ignore if we were cancelled
catch (java.util.concurrent.CancellationException ex) {
}
try {
hashDb.propertyChangeSupport.firePropertyChange(HashDb.Event.INDEXING_DONE.toString(), null, hashDb);
hashDb.propertyChangeSupport.firePropertyChange(HashDbManager.SetEvt.DB_INDEXED.toString(), null, hashDb.getHashSetName());
} catch (Exception e) {
logger.log(Level.SEVERE, "HashDbManager listener threw exception", e); //NON-NLS
MessageNotifyUtil.Notify.show(
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErr"),
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErrorListeningToUpdatesMsg"),
MessageNotifyUtil.MessageType.ERROR);
}
}
}
}
| Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java | /*
* Autopsy Forensic Browser
*
* Copyright 2011 - 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.hashdatabase;
import java.beans.PropertyChangeEvent;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.swing.JFileChooser;
import javax.swing.filechooser.FileNameExtensionFilter;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.Serializable;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javax.persistence.PersistenceException;
import javax.swing.JOptionPane;
import javax.swing.SwingWorker;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.FileUtils;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.util.io.NbObjectInputStream;
import org.openide.util.io.NbObjectOutputStream;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashEntry;
import org.sleuthkit.datamodel.SleuthkitJNI;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.IngestManager;
/**
* This class implements a singleton that manages the set of hash databases used
* to classify files as unknown, known or known bad.
*/
public class HashDbManager implements PropertyChangeListener {
private static final String ROOT_ELEMENT = "hash_sets"; //NON-NLS
private static final String SET_ELEMENT = "hash_set"; //NON-NLS
private static final String SET_NAME_ATTRIBUTE = "name"; //NON-NLS
private static final String SET_TYPE_ATTRIBUTE = "type"; //NON-NLS
private static final String SEARCH_DURING_INGEST_ATTRIBUTE = "use_for_ingest"; //NON-NLS
private static final String SEND_INGEST_MESSAGES_ATTRIBUTE = "show_inbox_messages"; //NON-NLS
private static final String PATH_ELEMENT = "hash_set_path"; //NON-NLS
private static final String LEGACY_PATH_NUMBER_ATTRIBUTE = "number"; //NON-NLS
private static final String CONFIG_FILE_NAME = "hashsets.xml"; //NON-NLS
private static final String DB_SERIALIZATION_FILE_NAME = "hashDbs.settings";
private static final String XSD_FILE_NAME = "HashsetsSchema.xsd"; //NON-NLS
private static final String ENCODING = "UTF-8"; //NON-NLS
private static final String HASH_DATABASE_FILE_EXTENSON = "kdb"; //NON-NLS
private static HashDbManager instance = null;
private final String configFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + CONFIG_FILE_NAME;
private final String DB_SERIALIZATION_FILE_PATH = PlatformUtil.getUserConfigDirectory() + File.separator + DB_SERIALIZATION_FILE_NAME;
private List<HashDb> knownHashSets = new ArrayList<>();
private List<HashDb> knownBadHashSets = new ArrayList<>();
private Set<String> hashSetNames = new HashSet<>();
private Set<String> hashSetPaths = new HashSet<>();
PropertyChangeSupport changeSupport = new PropertyChangeSupport(HashDbManager.class);
private static final Logger logger = Logger.getLogger(HashDbManager.class.getName());
/**
* Property change events published by the hash set manager. For all of these
* events, the old value should be null, and the new value should be the hash
* set name string.
*/
public enum SetEvt {
DB_ADDED, DB_DELETED, DB_INDEXED
};
/**
* Gets the singleton instance of this class.
*/
public static synchronized HashDbManager getInstance() {
if (instance == null) {
instance = new HashDbManager();
}
return instance;
}
public synchronized void addPropertyChangeListener(PropertyChangeListener listener) {
changeSupport.addPropertyChangeListener(listener);
}
private HashDbManager() {
readHashSetsConfigurationFromDisk();
}
/**
* Gets the extension, without the dot separator, that the SleuthKit
* requires for the hash database files that combine a database and an index
* and can therefore be updated.
*/
static String getHashDatabaseFileExtension() {
return HASH_DATABASE_FILE_EXTENSON;
}
public class HashDbManagerException extends Exception {
private HashDbManagerException(String message) {
super(message);
}
}
/**
* Adds an existing hash database to the set of hash databases used to
* classify files as known or known bad and saves the configuration.
*
* @param hashSetName Name used to represent the hash database in
* user interface components.
* @param path Full path to either a hash database file or a
* hash database index file.
* @param searchDuringIngest A flag indicating whether or not the hash
* database should be searched during ingest.
* @param sendIngestMessages A flag indicating whether hash set hit messages
* should be sent as ingest messages.
* @param knownFilesType The classification to apply to files whose
* hashes are found in the hash database.
*
* @return A HashDb representing the hash database.
*
* @throws HashDbManagerException
*/
public HashDb addExistingHashDatabase(String hashSetName, String path, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws HashDbManagerException {
HashDb hashDb = null;
try {
addExistingHashDatabaseInternal(hashSetName, path, searchDuringIngest, sendIngestMessages, knownFilesType);
} catch (TskCoreException ex) {
throw new HashDbManagerException(ex.getMessage());
}
// Save the configuration
if (!save()) {
throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.saveErrorExceptionMsg"));
}
return hashDb;
}
/**
* Adds an existing hash database to the set of hash databases used to
* classify files as known or known bad. Does not save the configuration -
* the configuration is only saved on demand to support cancellation of
* configuration panels.
*
* @param hashSetName Name used to represent the hash database in
* user interface components.
* @param path Full path to either a hash database file or a
* hash database index file.
* @param searchDuringIngest A flag indicating whether or not the hash
* database should be searched during ingest.
* @param sendIngestMessages A flag indicating whether hash set hit messages
* should be sent as ingest messages.
* @param knownFilesType The classification to apply to files whose
* hashes are found in the hash database.
*
* @return A HashDb representing the hash database.
*
* @throws HashDbManagerException, TskCoreException
*/
synchronized HashDb addExistingHashDatabaseInternal(String hashSetName, String path, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws HashDbManagerException, TskCoreException {
if (!new File(path).exists()) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.hashDbDoesNotExistExceptionMsg", path));
}
if (hashSetPaths.contains(path)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.hashDbAlreadyAddedExceptionMsg", path));
}
if (hashSetNames.contains(hashSetName)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.duplicateHashSetNameExceptionMsg", hashSetName));
}
return addHashDatabase(SleuthkitJNI.openHashDatabase(path), hashSetName, searchDuringIngest, sendIngestMessages, knownFilesType);
}
/**
* Adds a new hash database to the set of hash databases used to classify
* files as known or known bad and saves the configuration.
*
* @param hashSetName Hash set name used to represent the hash
* database in user interface components.
* @param path Full path to the database file to be created.
* @param searchDuringIngest A flag indicating whether or not the hash
* database should be searched during ingest.
* @param sendIngestMessages A flag indicating whether hash set hit messages
* should be sent as ingest messages.
* @param knownFilesType The classification to apply to files whose
* hashes are found in the hash database.
*
* @return A HashDb representing the hash database.
*
* @throws HashDbManagerException
*/
public HashDb addNewHashDatabase(String hashSetName, String path, boolean searchDuringIngest, boolean sendIngestMessages,
HashDb.KnownFilesType knownFilesType) throws HashDbManagerException {
HashDb hashDb = null;
try {
hashDb = addNewHashDatabaseInternal(hashSetName, path, searchDuringIngest, sendIngestMessages, knownFilesType);
} catch (TskCoreException ex) {
throw new HashDbManagerException(ex.getMessage());
}
// Save the configuration
if (!save()) {
throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.saveErrorExceptionMsg"));
}
return hashDb;
}
/**
* Adds a new hash database to the set of hash databases used to classify
* files as known or known bad. Does not save the configuration - the
* configuration is only saved on demand to support cancellation of
* configuration panels.
*
* @param hashSetName Hash set name used to represent the hash
* database in user interface components.
* @param path Full path to the database file to be created.
* @param searchDuringIngest A flag indicating whether or not the hash
* database should be searched during ingest.
* @param sendIngestMessages A flag indicating whether hash set hit messages
* should be sent as ingest messages.
* @param knownFilesType The classification to apply to files whose
* hashes are found in the hash database.
*
* @return A HashDb representing the hash database.
*
* @throws HashDbManagerException, TskCoreException
*/
synchronized HashDb addNewHashDatabaseInternal(String hashSetName, String path, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws HashDbManagerException, TskCoreException {
File file = new File(path);
if (file.exists()) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.hashDbFileExistsExceptionMsg", path));
}
if (!FilenameUtils.getExtension(file.getName()).equalsIgnoreCase(HASH_DATABASE_FILE_EXTENSON)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.illegalHashDbFileNameExtensionMsg",
getHashDatabaseFileExtension()));
}
if (hashSetPaths.contains(path)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.hashDbAlreadyAddedExceptionMsg", path));
}
if (hashSetNames.contains(hashSetName)) {
throw new HashDbManagerException(NbBundle.getMessage(HashDbManager.class, "HashDbManager.duplicateHashSetNameExceptionMsg", hashSetName));
}
return addHashDatabase(SleuthkitJNI.createHashDatabase(path), hashSetName, searchDuringIngest, sendIngestMessages, knownFilesType);
}
private HashDb addHashDatabase(int handle, String hashSetName, boolean searchDuringIngest, boolean sendIngestMessages, HashDb.KnownFilesType knownFilesType) throws TskCoreException {
// Wrap an object around the handle.
HashDb hashDb = new HashDb(handle, hashSetName, searchDuringIngest, sendIngestMessages, knownFilesType);
// Get the identity data before updating the collections since the
// accessor methods may throw.
String databasePath = hashDb.getDatabasePath();
String indexPath = hashDb.getIndexPath();
// Update the collections used to ensure that hash set names are unique
// and the same database is not added to the configuration more than once.
hashSetNames.add(hashDb.getHashSetName());
if (!databasePath.equals("None")) { //NON-NLS
hashSetPaths.add(databasePath);
}
if (!indexPath.equals("None")) { //NON-NLS
hashSetPaths.add(indexPath);
}
// Add the hash database to the appropriate collection for its type.
if (hashDb.getKnownFilesType() == HashDb.KnownFilesType.KNOWN) {
knownHashSets.add(hashDb);
} else {
knownBadHashSets.add(hashDb);
}
// Let any external listeners know that there's a new set
try {
changeSupport.firePropertyChange(SetEvt.DB_ADDED.toString(), null, hashSetName);
} catch (Exception e) {
logger.log(Level.SEVERE, "HashDbManager listener threw exception", e); //NON-NLS
MessageNotifyUtil.Notify.show(
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErr"),
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErrorListeningToUpdatesMsg"),
MessageNotifyUtil.MessageType.ERROR);
}
return hashDb;
}
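/**
* Creates the lookup index for a hash database on a background worker thread.
*/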
synchronized void indexHashDatabase(HashDb hashDb) {
hashDb.addPropertyChangeListener(this);
HashDbIndexer creator = new HashDbIndexer(hashDb);
creator.execute();
}
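/**
* Handles indexing-done events from hash databases by recording the path of the newly created index.
*/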
@Override
public void propertyChange(PropertyChangeEvent event) {
if (event.getPropertyName().equals(HashDb.Event.INDEXING_DONE.name())) {
HashDb hashDb = (HashDb) event.getNewValue();
if (null != hashDb) {
try {
String indexPath = hashDb.getIndexPath();
if (!indexPath.equals("None")) { //NON-NLS
hashSetPaths.add(indexPath);
}
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDb.getHashSetName() + " hash database after indexing", ex); //NON-NLS
}
}
}
}
/**
* Removes a hash database from the set of hash databases used to classify
* files as known or known bad and saves the configuration.
*
* @param hashDb
*
* @throws HashDbManagerException
*/
public synchronized void removeHashDatabase(HashDb hashDb) throws HashDbManagerException {
// Don't remove a database if ingest is running
boolean ingestIsRunning = IngestManager.getInstance().isIngestRunning();
if (ingestIsRunning) {
throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.ingestRunningExceptionMsg"));
}
removeHashDatabaseInternal(hashDb);
if (!save()) {
throw new HashDbManagerException(NbBundle.getMessage(this.getClass(), "HashDbManager.saveErrorExceptionMsg"));
}
}
/**
* Removes a hash database from the set of hash databases used to classify
* files as known or known bad. Does not save the configuration - the
* configuration is only saved on demand to support cancellation of
* configuration panels.
*
* @throws TskCoreException
*/
synchronized void removeHashDatabaseInternal(HashDb hashDb) {
// Remove the database from whichever hash set list it occupies,
// and remove its hash set name from the hash set used to ensure unique
// hash set names are used, before undertaking any operations that could throw. These operations will succeed and constitute
// a mostly effective removal, even if the subsequent operations fail.
String hashSetName = hashDb.getHashSetName();
knownHashSets.remove(hashDb);
knownBadHashSets.remove(hashDb);
hashSetNames.remove(hashSetName);
// Now undertake the operations that could throw.
try {
hashSetPaths.remove(hashDb.getIndexPath());
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting index path of " + hashDb.getHashSetName() + " hash database when removing the database", ex); //NON-NLS
}
try {
if (!hashDb.hasIndexOnly()) {
hashSetPaths.remove(hashDb.getDatabasePath());
}
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting database path of " + hashDb.getHashSetName() + " hash database when removing the database", ex); //NON-NLS
}
try {
hashDb.close();
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + hashDb.getHashSetName() + " hash database when removing the database", ex); //NON-NLS
}
// Let any external listeners know that a set has been deleted
try {
changeSupport.firePropertyChange(SetEvt.DB_DELETED.toString(), null, hashSetName);
} catch (Exception e) {
logger.log(Level.SEVERE, "HashDbManager listener threw exception", e); //NON-NLS
MessageNotifyUtil.Notify.show(
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErr"),
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErrorListeningToUpdatesMsg"),
MessageNotifyUtil.MessageType.ERROR);
}
}
/**
* Gets all of the hash databases used to classify files as known or known
* bad.
*
* @return A list, possibly empty, of hash databases.
*/
public synchronized List<HashDb> getAllHashSets() {
List<HashDb> hashDbs = new ArrayList<>();
hashDbs.addAll(knownHashSets);
hashDbs.addAll(knownBadHashSets);
return hashDbs;
}
/**
* Gets all of the hash databases used to classify files as known.
*
* @return A list, possibly empty, of hash databases.
*/
public synchronized List<HashDb> getKnownFileHashSets() {
List<HashDb> hashDbs = new ArrayList<>();
hashDbs.addAll(knownHashSets);
return hashDbs;
}
/**
* Gets all of the hash databases used to classify files as known bad.
*
* @return A list, possibly empty, of hash databases.
*/
public synchronized List<HashDb> getKnownBadFileHashSets() {
List<HashDb> hashDbs = new ArrayList<>();
hashDbs.addAll(knownBadHashSets);
return hashDbs;
}
/**
* Gets all of the hash databases that accept updates.
*
* @return A list, possibly empty, of hash databases.
*/
public synchronized List<HashDb> getUpdateableHashSets() {
List<HashDb> updateableDbs = getUpdateableHashSets(knownHashSets);
updateableDbs.addAll(getUpdateableHashSets(knownBadHashSets));
return updateableDbs;
}
private List<HashDb> getUpdateableHashSets(List<HashDb> hashDbs) {
ArrayList<HashDb> updateableDbs = new ArrayList<>();
for (HashDb db : hashDbs) {
try {
if (db.isUpdateable()) {
updateableDbs.add(db);
}
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error checking updateable status of " + db.getHashSetName() + " hash database", ex); //NON-NLS
}
}
return updateableDbs;
}
/**
* Saves the hash sets configuration. Note that the configuration is only
* saved on demand to support cancellation of configuration panels.
*
* @return True on success, false otherwise.
*/
synchronized boolean save() {
return writeHashSetConfigurationToDisk();
}
/**
* Restores the last saved hash sets configuration. This supports
* cancellation of configuration panels.
*/
public synchronized void loadLastSavedConfiguration() {
closeHashDatabases(knownHashSets);
closeHashDatabases(knownBadHashSets);
hashSetNames.clear();
hashSetPaths.clear();
readHashSetsConfigurationFromDisk();
}
private void closeHashDatabases(List<HashDb> hashDatabases) {
for (HashDb database : hashDatabases) {
try {
database.close();
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error closing " + database.getHashSetName() + " hash database", ex); //NON-NLS
}
}
hashDatabases.clear();
}
private boolean writeHashSetConfigurationToDisk() {
HashDbSerializationSettings settings = new HashDbSerializationSettings(this.knownHashSets, this.knownBadHashSets);
try (NbObjectOutputStream out = new NbObjectOutputStream(new FileOutputStream(DB_SERIALIZATION_FILE_PATH))) {
out.writeObject(settings);
File xmlFile = new File(configFilePath);
if (xmlFile.exists()) {
xmlFile.delete();
}
return true;
} catch (IOException ex) {
throw new PersistenceException(String.format("Failed to write settings to %s", DB_SERIALIZATION_FILE_PATH), ex);
}
}
private static void writeHashDbsToDisk(Document doc, Element rootEl, List<HashDb> hashDbs) {
for (HashDb db : hashDbs) {
// Get the path for the hash database before writing anything, in
// case an exception is thrown.
String path;
try {
if (db.hasIndexOnly()) {
path = db.getIndexPath();
} else {
path = db.getDatabasePath();
}
} catch (TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error getting path of hash database " + db.getHashSetName() + ", discarding from hash database configuration", ex); //NON-NLS
continue;
}
Element setElement = doc.createElement(SET_ELEMENT);
setElement.setAttribute(SET_NAME_ATTRIBUTE, db.getHashSetName());
setElement.setAttribute(SET_TYPE_ATTRIBUTE, db.getKnownFilesType().toString());
setElement.setAttribute(SEARCH_DURING_INGEST_ATTRIBUTE, Boolean.toString(db.getSearchDuringIngest()));
setElement.setAttribute(SEND_INGEST_MESSAGES_ATTRIBUTE, Boolean.toString(db.getSendIngestMessages()));
Element pathElement = doc.createElement(PATH_ELEMENT);
pathElement.setTextContent(path);
setElement.appendChild(pathElement);
rootEl.appendChild(setElement);
}
}
private boolean hashSetsConfigurationFileExists() {
File f = new File(configFilePath);
return f.exists() && f.canRead() && f.canWrite();
}
private boolean readHashSetsConfigurationFromDisk() {
if (hashSetsConfigurationFileExists()) {
boolean updatedSchema = false;
// Open the XML document that implements the configuration file.
final Document doc = XMLUtil.loadDoc(HashDbManager.class, configFilePath);
if (doc == null) {
return false;
}
// Get the root element.
Element root = doc.getDocumentElement();
if (root == null) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error loading hash sets: invalid file format."); //NON-NLS
return false;
}
// Get the hash set elements.
NodeList setsNList = root.getElementsByTagName(SET_ELEMENT);
int numSets = setsNList.getLength();
if (numSets == 0) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.WARNING, "No element hash_set exists."); //NON-NLS
}
// Create HashDb objects for each hash set element. Skip to the next hash database if the definition of
// a particular hash database is not well-formed.
String attributeErrorMessage = " attribute was not set for hash_set at index {0}, cannot make instance of HashDb class"; //NON-NLS
String elementErrorMessage = " element was not set for hash_set at index {0}, cannot make instance of HashDb class"; //NON-NLS
for (int i = 0; i < numSets; ++i) {
Element setEl = (Element) setsNList.item(i);
String hashSetName = setEl.getAttribute(SET_NAME_ATTRIBUTE);
if (hashSetName.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, SET_NAME_ATTRIBUTE + attributeErrorMessage, i);
continue;
}
// Handle configurations saved back when duplicate hash set names were still permitted.
if (hashSetNames.contains(hashSetName)) {
int suffix = 0;
String newHashSetName;
do {
++suffix;
newHashSetName = hashSetName + suffix;
} while (hashSetNames.contains(newHashSetName));
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.replacingDuplicateHashsetNameMsg",
hashSetName, newHashSetName),
NbBundle.getMessage(this.getClass(), "HashDbManager.openHashDbErr"),
JOptionPane.ERROR_MESSAGE);
hashSetName = newHashSetName;
}
String knownFilesType = setEl.getAttribute(SET_TYPE_ATTRIBUTE);
if (knownFilesType.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, SET_TYPE_ATTRIBUTE + attributeErrorMessage, i);
continue;
}
// Handle legacy known files types.
if (knownFilesType.equals("NSRL")) { //NON-NLS
knownFilesType = HashDb.KnownFilesType.KNOWN.toString();
updatedSchema = true;
}
final String searchDuringIngest = setEl.getAttribute(SEARCH_DURING_INGEST_ATTRIBUTE);
if (searchDuringIngest.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, SEARCH_DURING_INGEST_ATTRIBUTE + attributeErrorMessage, i);
continue;
}
Boolean searchDuringIngestFlag = Boolean.parseBoolean(searchDuringIngest);
final String sendIngestMessages = setEl.getAttribute(SEND_INGEST_MESSAGES_ATTRIBUTE);
if (sendIngestMessages.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, SEND_INGEST_MESSAGES_ATTRIBUTE + attributeErrorMessage, i);
continue;
}
Boolean sendIngestMessagesFlag = Boolean.parseBoolean(sendIngestMessages);
String dbPath;
NodeList pathsNList = setEl.getElementsByTagName(PATH_ELEMENT);
if (pathsNList.getLength() > 0) {
Element pathEl = (Element) pathsNList.item(0); // Shouldn't be more than one.
// Check for legacy path number attribute.
String legacyPathNumber = pathEl.getAttribute(LEGACY_PATH_NUMBER_ATTRIBUTE);
if (null != legacyPathNumber && !legacyPathNumber.isEmpty()) {
updatedSchema = true;
}
dbPath = pathEl.getTextContent();
if (dbPath.isEmpty()) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, PATH_ELEMENT + elementErrorMessage, i);
continue;
}
} else {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, PATH_ELEMENT + elementErrorMessage, i);
continue;
}
dbPath = getValidFilePath(hashSetName, dbPath);
if (null != dbPath) {
try {
addExistingHashDatabaseInternal(hashSetName, dbPath, searchDuringIngestFlag, sendIngestMessagesFlag, HashDb.KnownFilesType.valueOf(knownFilesType));
} catch (HashDbManagerException | TskCoreException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.SEVERE, "Error opening hash database", ex); //NON-NLS
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.unableToOpenHashDbMsg", dbPath),
NbBundle.getMessage(this.getClass(), "HashDbManager.openHashDbErr"),
JOptionPane.ERROR_MESSAGE);
}
} else {
Logger.getLogger(HashDbManager.class.getName()).log(Level.WARNING, "No valid path for hash_set at index {0}, cannot make instance of HashDb class", i); //NON-NLS
}
}
if (updatedSchema) {
String backupFilePath = configFilePath + ".v1_backup"; //NON-NLS
String messageBoxTitle = NbBundle.getMessage(this.getClass(),
"HashDbManager.msgBoxTitle.confFileFmtChanged");
String baseMessage = NbBundle.getMessage(this.getClass(),
"HashDbManager.baseMessage.updatedFormatHashDbConfig");
try {
FileUtils.copyFile(new File(configFilePath), new File(backupFilePath));
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.savedBackupOfOldConfigMsg",
baseMessage, backupFilePath),
messageBoxTitle,
JOptionPane.INFORMATION_MESSAGE);
} catch (IOException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.WARNING, "Failed to save backup of old format configuration file to " + backupFilePath, ex); //NON-NLS
JOptionPane.showMessageDialog(null, baseMessage, messageBoxTitle, JOptionPane.INFORMATION_MESSAGE);
}
writeHashSetConfigurationToDisk();
}
return true;
} else {
File fileSetFile = new File(DB_SERIALIZATION_FILE_PATH);
if (fileSetFile.exists()) {
try {
try (NbObjectInputStream in = new NbObjectInputStream(new FileInputStream(DB_SERIALIZATION_FILE_PATH))) {
HashDbSerializationSettings filesSetsSettings = (HashDbSerializationSettings) in.readObject();
this.setFields(filesSetsSettings);
return true;
}
} catch (IOException | ClassNotFoundException ex) {
throw new PersistenceException(String.format("Failed to read settings from %s", DB_SERIALIZATION_FILE_PATH), ex);
}
} else {
this.setFields(new HashDbSerializationSettings(new ArrayList<>(), new ArrayList<>()));
return true;
}
}
}
private void setFields(HashDbSerializationSettings settings) throws TskCoreException {
this.knownHashSets = settings.getKnownHashSets();
this.knownBadHashSets = settings.getKnownBadHashSets();
this.hashSetNames = new HashSet<>();
this.hashSetPaths = new HashSet<>();
for (HashDbManager.HashDb hashDb : knownHashSets) {
String hashSetName = hashDb.getHashSetName();
if (hashSetNames.contains(hashSetName)) {
int suffix = 0;
String newHashSetName;
do {
++suffix;
newHashSetName = hashSetName + suffix;
} while (hashSetNames.contains(newHashSetName));
}
this.hashSetPaths.add(hashDb.getDatabasePath());
}
}
private String getValidFilePath(String hashSetName, String configuredPath) {
// Check the configured path.
File database = new File(configuredPath);
if (database.exists()) {
return configuredPath;
}
// Give the user an opportunity to find the desired file.
String newPath = null;
if (JOptionPane.showConfirmDialog(null,
NbBundle.getMessage(this.getClass(), "HashDbManager.dlgMsg.dbNotFoundAtLoc",
hashSetName, configuredPath),
NbBundle.getMessage(this.getClass(), "HashDbManager.dlgTitle.MissingDb"),
JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) {
newPath = searchForFile();
if (null != newPath && !newPath.isEmpty()) {
database = new File(newPath);
if (!database.exists()) {
newPath = null;
}
}
}
return newPath;
}
private String searchForFile() {
String filePath = null;
JFileChooser fc = new JFileChooser();
fc.setDragEnabled(false);
fc.setFileSelectionMode(JFileChooser.FILES_ONLY);
String[] EXTENSION = new String[]{"txt", "idx", "hash", "Hash", "kdb"}; //NON-NLS
FileNameExtensionFilter filter = new FileNameExtensionFilter(
NbBundle.getMessage(this.getClass(), "HashDbManager.fileNameExtensionFilter.title"), EXTENSION);
fc.setFileFilter(filter);
fc.setMultiSelectionEnabled(false);
if (fc.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
File f = fc.getSelectedFile();
try {
filePath = f.getCanonicalPath();
} catch (IOException ex) {
Logger.getLogger(HashDbManager.class.getName()).log(Level.WARNING, "Couldn't get selected file path", ex); //NON-NLS
}
}
return filePath;
}
/**
* Instances of this class represent hash databases used to classify files
* as known or known bad.
*/
public static class HashDb implements Serializable {
/**
* Indicates how files with hashes stored in a particular hash database
* object should be classified.
*/
public enum KnownFilesType {
KNOWN(NbBundle.getMessage(HashDbManager.class, "HashDbManager.known.text")),
KNOWN_BAD(NbBundle.getMessage(HashDbManager.class, "HashDbManager.knownBad.text"));
private String displayName;
private KnownFilesType(String displayName) {
this.displayName = displayName;
}
public String getDisplayName() {
return this.displayName;
}
}
/**
* Property change events published by hash database objects.
*/
public enum Event {
INDEXING_DONE
}
private static final long serialVersionUID = 1L;
private int handle;
private String hashSetName;
private boolean searchDuringIngest;
private boolean sendIngestMessages;
private KnownFilesType knownFilesType;
private boolean indexing;
private final PropertyChangeSupport propertyChangeSupport = new PropertyChangeSupport(this);
private HashDb(int handle, String hashSetName, boolean useForIngest, boolean sendHitMessages, KnownFilesType knownFilesType) {
this.handle = handle;
this.hashSetName = hashSetName;
this.searchDuringIngest = useForIngest;
this.sendIngestMessages = sendHitMessages;
this.knownFilesType = knownFilesType;
this.indexing = false;
}
/**
* Adds a listener for the events defined in HashDb.Event.
*/
public void addPropertyChangeListener(PropertyChangeListener pcl) {
propertyChangeSupport.addPropertyChangeListener(pcl);
}
/**
* Removes a listener for the events defined in HashDb.Event.
*/
public void removePropertyChangeListener(PropertyChangeListener pcl) {
propertyChangeSupport.removePropertyChangeListener(pcl);
}
public String getHashSetName() {
return hashSetName;
}
public String getDatabasePath() throws TskCoreException {
return SleuthkitJNI.getHashDatabasePath(handle);
}
public String getIndexPath() throws TskCoreException {
return SleuthkitJNI.getHashDatabaseIndexPath(handle);
}
public KnownFilesType getKnownFilesType() {
return knownFilesType;
}
public boolean getSearchDuringIngest() {
return searchDuringIngest;
}
void setSearchDuringIngest(boolean useForIngest) {
this.searchDuringIngest = useForIngest;
}
public boolean getSendIngestMessages() {
return sendIngestMessages;
}
void setSendIngestMessages(boolean showInboxMessages) {
this.sendIngestMessages = showInboxMessages;
}
/**
* Indicates whether the hash database accepts updates.
*
* @return True if the database accepts updates, false otherwise.
*/
public boolean isUpdateable() throws TskCoreException {
return SleuthkitJNI.isUpdateableHashDatabase(this.handle);
}
/**
* Adds hashes of content (if calculated) to the hash database.
*
* @param content The content for which the calculated hashes, if any,
* are to be added to the hash database.
*
* @throws TskCoreException
*/
public void addHashes(Content content) throws TskCoreException {
addHashes(content, null);
}
/**
* Adds hashes of content (if calculated) to the hash database.
*
* @param content The content for which the calculated hashes, if any,
* are to be added to the hash database.
* @param comment A comment to associate with the hashes, e.g., the name
* of the case in which the content was encountered.
*
* @throws TskCoreException
*/
public void addHashes(Content content, String comment) throws TskCoreException {
// This only works for AbstractFiles and MD5 hashes at present.
assert content instanceof AbstractFile;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
if (null != file.getMd5Hash()) {
SleuthkitJNI.addToHashDatabase(null, file.getMd5Hash(), null, null, comment, handle);
}
}
}
/**
* Adds a list of hashes to the hash database at once
*
* @param hashes List of hashes
*
* @throws TskCoreException
*/
public void addHashes(List<HashEntry> hashes) throws TskCoreException {
SleuthkitJNI.addToHashDatabase(hashes, handle);
}
/**
* Perform a basic boolean lookup of the file's hash.
*
* @param content
*
* @return True if file's MD5 is in the hash database
*
* @throws TskCoreException
*/
public boolean lookupMD5Quick(Content content) throws TskCoreException {
boolean result = false;
assert content instanceof AbstractFile;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
if (null != file.getMd5Hash()) {
result = SleuthkitJNI.lookupInHashDatabase(file.getMd5Hash(), handle);
}
}
return result;
}
/**
* Lookup hash value in DB and provide details on file.
*
* @param content
*
* @return null if file is not in database.
*
* @throws TskCoreException
*/
public HashHitInfo lookupMD5(Content content) throws TskCoreException {
HashHitInfo result = null;
// This only works for AbstractFiles and MD5 hashes at present.
assert content instanceof AbstractFile;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
if (null != file.getMd5Hash()) {
result = SleuthkitJNI.lookupInHashDatabaseVerbose(file.getMd5Hash(), handle);
}
}
return result;
}
boolean hasIndex() throws TskCoreException {
return SleuthkitJNI.hashDatabaseHasLookupIndex(handle);
}
boolean hasIndexOnly() throws TskCoreException {
return SleuthkitJNI.hashDatabaseIsIndexOnly(handle);
}
boolean canBeReIndexed() throws TskCoreException {
return SleuthkitJNI.hashDatabaseCanBeReindexed(handle);
}
boolean isIndexing() {
return indexing;
}
private void close() throws TskCoreException {
SleuthkitJNI.closeHashDatabase(handle);
}
}
/**
* Worker thread to make an index of a database
*/
private class HashDbIndexer extends SwingWorker<Object, Void> {
private ProgressHandle progress = null;
private HashDb hashDb = null;
HashDbIndexer(HashDb hashDb) {
this.hashDb = hashDb;
}
@Override
protected Object doInBackground() {
hashDb.indexing = true;
progress = ProgressHandleFactory.createHandle(
NbBundle.getMessage(this.getClass(), "HashDbManager.progress.indexingHashSet", hashDb.hashSetName));
progress.start();
progress.switchToIndeterminate();
try {
SleuthkitJNI.createLookupIndexForHashDatabase(hashDb.handle);
} catch (TskCoreException ex) {
Logger.getLogger(HashDb.class.getName()).log(Level.SEVERE, "Error indexing hash database", ex); //NON-NLS
JOptionPane.showMessageDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbManager.dlgMsg.errorIndexingHashSet",
hashDb.getHashSetName()),
NbBundle.getMessage(this.getClass(), "HashDbManager.hashDbIndexingErr"),
JOptionPane.ERROR_MESSAGE);
}
return null;
}
@Override
protected void done() {
hashDb.indexing = false;
progress.finish();
// see if we got any errors
try {
get();
} catch (InterruptedException | ExecutionException ex) {
logger.log(Level.SEVERE, "Error creating index", ex); //NON-NLS
MessageNotifyUtil.Notify.show(
NbBundle.getMessage(this.getClass(), "HashDbManager.errCreatingIndex.title"),
NbBundle.getMessage(this.getClass(), "HashDbManager.errCreatingIndex.msg", ex.getMessage()),
MessageNotifyUtil.MessageType.ERROR);
} // catch and ignore if we were cancelled
catch (java.util.concurrent.CancellationException ex) {
}
try {
hashDb.propertyChangeSupport.firePropertyChange(HashDb.Event.INDEXING_DONE.toString(), null, hashDb);
hashDb.propertyChangeSupport.firePropertyChange(HashDbManager.SetEvt.DB_INDEXED.toString(), null, hashDb.getHashSetName());
} catch (Exception e) {
logger.log(Level.SEVERE, "HashDbManager listener threw exception", e); //NON-NLS
MessageNotifyUtil.Notify.show(
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErr"),
NbBundle.getMessage(this.getClass(), "HashDbManager.moduleErrorListeningToUpdatesMsg"),
MessageNotifyUtil.MessageType.ERROR);
}
}
}
}
| Moving towards serialization of hash dbs
| Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java | Moving towards serialization of hash dbs |
|
Java | apache-2.0 | 78d376e15098720f68d726e1e71a868867a0eea2 | 0 | alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,DwayneJengSage/BridgePF,Sage-Bionetworks/BridgePF,alxdarksage/BridgePF,DwayneJengSage/BridgePF,DwayneJengSage/BridgePF,Sage-Bionetworks/BridgePF,alxdarksage/BridgePF | package org.sagebionetworks.bridge.config;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import org.jasypt.properties.EncryptableProperties;
import org.jasypt.salt.StringFixedSaltGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
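/**
* Layered configuration for Bridge. Values come from the default config file, then are
* overridden by an optional per-user config file, environment variables, and JVM system
* properties; property values stored encrypted are decrypted on access using the
* configured password and salt.
*/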
public class BridgeConfig {
private final Logger logger = LoggerFactory.getLogger(BridgeConfig.class);
private static final String CONFIG_FILE = "bridge.conf";
private static final String DEFAULT_CONFIG_FILE = "conf/" + CONFIG_FILE;
private static final String USER_CONFIG_FILE = System.getProperty("user.home") + "/" + ".sbt" + "/" + CONFIG_FILE;
// Property name for the environment
private static final String ENVIRONMENT = "bridge.env";
// Property name for the encryption/decryption password
private static final String PASSWORD = "bridge.pwd";
// Property name for the encryption/decryption salt
private static final String SALT = "bridge.salt";
// Predefined environments
private static final String ENV_LOCAL = "local";
private static final String ENV_STUB = "stub";
private static final String ENV_DEV = "dev";
private static final String ENV_PROD = "prod";
private final String environment;
private final Properties properties;
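// Reads config values from OS environment variables.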
private final ConfigReader envReader = new ConfigReader() {
@Override
public String read(String name) {
try {
return System.getenv(name);
} catch(SecurityException e) {
logger.error("Cannot read environment variable " + name + " because of SecurityException.");
return null;
}
}
};
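// Reads config values from JVM system properties (typically passed on the command line).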
private final ConfigReader cmdArgReader = new ConfigReader() {
@Override
public String read(String name) {
try {
return System.getProperty(name);
} catch(SecurityException e) {
logger.error("Cannot read system property " + name + " because of SecurityException.");
return null;
}
}
};
public BridgeConfig() {
this(new File(DEFAULT_CONFIG_FILE));
}
public BridgeConfig(File defaultConfig) {
// Load default config from source code
final Properties properties = new Properties();
try {
loadProperties(new FileInputStream(defaultConfig), properties);
} catch (FileNotFoundException e) {
throw new RuntimeException("Missing default config at " + defaultConfig.getAbsolutePath());
}
// Load additional config from the user's sbt home
// This overwrites properties of the same name in the default config
File file = new File(USER_CONFIG_FILE);
loadProperties(file, properties);
// Find out the environment
environment = readEnvironment(properties);
if (environment == null) {
throw new NullPointerException("Environment undetermined.");
}
// Collapse the properties for the current environment
Properties collapsed = collapse(properties, environment);
final StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
// TODO: Better encryption
// encryptor.setAlgorithm("PBEWithMD5AndTripleDES");
// encryptor.setKeyObtentionIterations(1000);
// encryptor.setSaltGenerator(new RandomSaltGenerator());
// Read the password for encryption/decryption
final String pwd = read(PASSWORD, properties);
encryptor.setPassword(pwd);
final String salt = read(SALT, properties);
encryptor.setSaltGenerator(new StringFixedSaltGenerator(salt));
// Decrypted properties
this.properties = new EncryptableProperties(collapsed, encryptor);
}
public String getEnvironment() {
return environment;
}
public boolean isLocal() {
return ENV_LOCAL.equals(environment);
}
public boolean isStub() {
return ENV_STUB.equals(environment);
}
public boolean isDevelopment() {
return ENV_DEV.equals(environment);
}
public boolean isProduction() {
return ENV_PROD.equals(environment);
}
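/**
* Gets the property value for the current environment, transparently decrypting values stored encrypted.
*/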
public String getProperty(String name) {
return properties.getProperty(name);
}
///////////////////////////
private void loadProperties(final InputStream inputStream, final Properties properties) {
try {
properties.load(inputStream);
inputStream.close();
} catch(IOException e) {
throw new RuntimeException(e);
} finally {
try {
inputStream.close();
} catch(IOException e) {
throw new RuntimeException(e);
}
}
}
private void loadProperties(final File file, final Properties properties) {
try {
InputStream inputStream = new FileInputStream(file);
loadProperties(inputStream, properties);
} catch(FileNotFoundException e){
logger.warn(file.getPath() + " not found and is skipped.");
}
}
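/**
* Determines the target environment, defaulting to local when unset and rejecting unrecognized values.
*/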
private String readEnvironment(final Properties properties) {
final String env = read(ENVIRONMENT, properties);
if (env == null) {
logger.info("Environment not set. Is this local development?");
return ENV_LOCAL;
}
if (!ENV_LOCAL.equals(env)
&& !ENV_STUB.equals(env)
&& !ENV_DEV.equals(env)
&& !ENV_PROD.equals(env)) {
throw new RuntimeException("Invalid environment " + env + " from config.");
}
return env;
}
private String read(final String name, final Properties properties) {
// First command line argument (System.getProperty())
String value = cmdArgReader.read(name);
// Then environment variable
if (value == null) {
value = envReader.read(name);
}
// Then properties file
if (value == null) {
value = properties.getProperty(name);
}
return value;
}
/**
* Collapses the properties into new properties relevant to the current environment.
* Start with default properties. Overwrite with properties for the current environment
* and properties read from the environment and the command line.
*/
private Properties collapse(final Properties properties, final String environment) {
Properties collapsed = new Properties();
// Read the default properties
for (Object key : properties.keySet()) {
final String name = key.toString();
if (isDefaultProperty(name)) {
collapsed.setProperty(name, properties.getProperty(name));
}
}
// Overwrite with properties for the current environment
for (Object key : properties.keySet()) {
final String name = key.toString();
if (name.startsWith(environment + ".")) {
String strippedName = name.substring(environment.length() + 1);
collapsed.setProperty(strippedName, properties.getProperty(name));
}
}
// Overwrite with command line arguments and environment variables
for (Object key : collapsed.keySet()) {
final String name = key.toString();
String value = cmdArgReader.read(name);
if (value == null) {
value = envReader.read(name);
}
if (value != null) {
collapsed.setProperty(name, value);
}
}
return collapsed;
}
/**
* If the property is not bound to a particular environment.
*/
private boolean isDefaultProperty(String name) {
if (name.startsWith(ENV_LOCAL + ".")) {
return false;
}
if (name.startsWith(ENV_STUB + ".")) {
return false;
}
if (name.startsWith(ENV_DEV + ".")) {
return false;
}
if (name.startsWith(ENV_PROD + ".")) {
return false;
}
return true;
}
}
| app/org/sagebionetworks/bridge/config/BridgeConfig.java | package org.sagebionetworks.bridge.config;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import org.jasypt.properties.EncryptableProperties;
import org.jasypt.salt.StringFixedSaltGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class BridgeConfig {
private final Logger logger = LoggerFactory.getLogger(BridgeConfig.class);
private static final String CONFIG_FILE = "bridge.conf";
private static final String DEFAULT_CONFIG_FILE = "conf/" + CONFIG_FILE;
private static final String USER_CONFIG_FILE = System.getProperty("user.home") + "/" + ".sbt" + "/" + CONFIG_FILE;
// Property name for the environment
private static final String ENVIRONMENT = "bridge.env";
// Property name for the encryption/decryption password
private static final String PASSWORD = "bridge.pwd";
// Property name for the encryption/decryption salt
private static final String SALT = "bridge.salt";
// Predefined environments
private static final String ENV_LOCAL = "local";
private static final String ENV_DEV = "dev";
private static final String ENV_PROD = "prod";
private final String environment;
private final Properties properties;
private final ConfigReader envReader = new ConfigReader() {
@Override
public String read(String name) {
try {
return System.getenv(name);
} catch(SecurityException e) {
logger.error("Cannot read environment variable " + name + " because of SecurityException.");
return null;
}
}
};
private final ConfigReader cmdArgReader = new ConfigReader() {
@Override
public String read(String name) {
try {
return System.getProperty(name);
} catch(SecurityException e) {
logger.error("Cannot read system property " + name + " because of SecurityException.");
return null;
}
}
};
public BridgeConfig() {
this(new File(DEFAULT_CONFIG_FILE));
}
public BridgeConfig(File defaultConfig) {
// Load default config from source code
final Properties properties = new Properties();
try {
loadProperties(new FileInputStream(defaultConfig), properties);
} catch (FileNotFoundException e) {
throw new RuntimeException("Missing default config at " + defaultConfig.getAbsolutePath());
}
// Load additional config from the user's sbt home
// This overwrites properties of the same name in the default config
File file = new File(USER_CONFIG_FILE);
loadProperties(file, properties);
// Find out the environment
environment = readEnvironment(properties);
if (environment == null) {
throw new NullPointerException("Environment undetermined.");
}
// Collapse the properties for the current environment
Properties collapsed = collapse(properties, environment);
final StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
// TODO: Better encryption
// encryptor.setAlgorithm("PBEWithMD5AndTripleDES");
// encryptor.setKeyObtentionIterations(1000);
// encryptor.setSaltGenerator(new RandomSaltGenerator());
// Read the password for encryption/decryption
final String pwd = read(PASSWORD, properties);
encryptor.setPassword(pwd);
final String salt = read(SALT, properties);
encryptor.setSaltGenerator(new StringFixedSaltGenerator(salt));
// Decrypted properties
this.properties = new EncryptableProperties(collapsed, encryptor);
}
public String getEnvironment() {
return environment;
}
public boolean isLocal() {
return ENV_LOCAL.equals(environment);
}
public boolean isDevelopment() {
return ENV_DEV.equals(environment);
}
public boolean isProduction() {
return ENV_PROD.equals(environment);
}
public String getProperty(String name) {
return properties.getProperty(name);
}
///////////////////////////
private void loadProperties(final InputStream inputStream, final Properties properties) {
try {
properties.load(inputStream);
inputStream.close();
} catch(IOException e) {
throw new RuntimeException(e);
} finally {
try {
inputStream.close();
} catch(IOException e) {
throw new RuntimeException(e);
}
}
}
private void loadProperties(final File file, final Properties properties) {
try {
InputStream inputStream = new FileInputStream(file);
loadProperties(inputStream, properties);
} catch(FileNotFoundException e){
logger.warn(file.getPath() + " not found and is skipped.");
}
}
private String readEnvironment(final Properties properties) {
final String env = read(ENVIRONMENT, properties);
if (env == null) {
logger.info("Environment not set. Is this local development?");
return ENV_LOCAL;
}
if (!ENV_LOCAL.equals(env) && !ENV_DEV.equals(env) && !ENV_PROD.equals(env)) {
throw new RuntimeException("Invalid environment " + env + " from config.");
}
return env;
}
private String read(final String name, final Properties properties) {
// First command line argument (System.getProperty())
String value = cmdArgReader.read(name);
// Then environment variable
if (value == null) {
value = envReader.read(name);
}
// Then properties file
if (value == null) {
value = properties.getProperty(name);
}
return value;
}
/**
* Collapses the properties into new properties relevant to the current environment.
* Start with default properties. Overwrite with properties for the current environment
* and properties read from the environment and the command line.
*/
private Properties collapse(final Properties properties, final String environment) {
Properties collapsed = new Properties();
// Read the default properties
for (Object key : properties.keySet()) {
final String name = key.toString();
if (isDefaultProperty(name)) {
collapsed.setProperty(name, properties.getProperty(name));
}
}
// Overwrite with properties for the current environment
for (Object key : properties.keySet()) {
final String name = key.toString();
if (name.startsWith(environment + ".")) {
String strippedName = name.substring(environment.length() + 1);
collapsed.setProperty(strippedName, properties.getProperty(name));
}
}
// Overwrite with command line arguments and environment variables
for (Object key : collapsed.keySet()) {
final String name = key.toString();
String value = cmdArgReader.read(name);
if (value == null) {
value = envReader.read(name);
}
if (value != null) {
collapsed.setProperty(name, value);
}
}
return collapsed;
}
/**
* If the property is not bound to a particular environment.
*/
private boolean isDefaultProperty(String name) {
if (name.startsWith(ENV_LOCAL + ".")) {
return false;
}
if (name.startsWith(ENV_DEV + ".")) {
return false;
}
if (name.startsWith(ENV_PROD + ".")) {
return false;
}
return true;
}
}
| Add the stub environment.
| app/org/sagebionetworks/bridge/config/BridgeConfig.java | Add the stub environment. |
|
Java | apache-2.0 | 0bf8a516e33816e3f8f787ef9a549905ea28bf97 | 0 | electrum/presto,ebyhr/presto,losipiuk/presto,ebyhr/presto,Praveen2112/presto,electrum/presto,Praveen2112/presto,smartnews/presto,electrum/presto,ebyhr/presto,smartnews/presto,erichwang/presto,11xor6/presto,Praveen2112/presto,erichwang/presto,erichwang/presto,dain/presto,11xor6/presto,dain/presto,11xor6/presto,losipiuk/presto,losipiuk/presto,electrum/presto,ebyhr/presto,dain/presto,smartnews/presto,smartnews/presto,11xor6/presto,dain/presto,erichwang/presto,dain/presto,Praveen2112/presto,electrum/presto,ebyhr/presto,Praveen2112/presto,smartnews/presto,erichwang/presto,losipiuk/presto,11xor6/presto,losipiuk/presto | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.testng.services;
import io.prestosql.tempto.Requirement;
import io.prestosql.tempto.Requirements;
import io.prestosql.tempto.RequirementsProvider;
import io.prestosql.tempto.configuration.Configuration;
import io.prestosql.tempto.testmarkers.WithName;
import org.testng.annotations.Test;
import java.lang.reflect.Method;
import static io.prestosql.testng.services.ReportUnannotatedMethods.findUnannotatedTestMethods;
import static io.prestosql.testng.services.ReportUnannotatedMethods.isTemptoClass;
import static org.assertj.core.api.Assertions.assertThat;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public class TestReportUnannotatedMethods
{
@Test
public void testTest()
{
assertThat(findUnannotatedTestMethods(TestingTest.class))
.isEmpty();
assertThat(findUnannotatedTestMethods(TestingTestWithProxy.class))
.isEmpty();
}
@Test
public void testTestWithoutTestAnnotation()
{
assertThat(findUnannotatedTestMethods(TestingTestWithoutTestAnnotation.class))
.extracting(Method::getName)
.containsExactly("testWithMissingTestAnnotation", "methodInInterface");
}
@Test
public void testTemptoRequirementsProvider()
{
assertThat(findUnannotatedTestMethods(TestingRequirementsProvider.class))
.extracting(Method::getName)
.containsExactly("testWithMissingTestAnnotation");
assertThat(findUnannotatedTestMethods(TestingRequirementsProviderWithProxyClass.class))
.extracting(Method::getName)
.containsExactly("testWithMissingTestAnnotation", "testWithMissingTestAnnotationInProxy");
}
@Test
public void testTemptoPackage()
{
assertTrue(isTemptoClass(RequirementsProvider.class));
assertTrue(isTemptoClass(WithName.class));
assertFalse(isTemptoClass(getClass()));
}
@Test
public void testSuppressedMethods()
{
assertThat(findUnannotatedTestMethods(TestingTestWithSuppressedPublicMethod.class))
.isEmpty();
assertThat(findUnannotatedTestMethods(TestingTestWithSuppressedPublicMethodInInterface.class))
.isEmpty();
}
private static class TestingTest
implements TestingInterfaceWithTest
{
@Test
public void test() {}
}
private static class TestingTestWithProxy
extends TestingInterfaceWithTestProxy
{
@Test
public void test() {}
}
private static class TestingTestWithoutTestAnnotation
implements TestingInterface
{
public void testWithMissingTestAnnotation() {}
@Override
public String toString()
{
return "test override";
}
}
private static class TestingRequirementsProvider
implements RequirementsProvider
{
@Override
public Requirement getRequirements(Configuration configuration)
{
return Requirements.allOf();
}
public void testWithMissingTestAnnotation() {}
}
private static class TestingRequirementsProviderWithProxyClass
extends RequirementsProviderProxy
{
@Override
public Requirement getRequirements(Configuration configuration)
{
return Requirements.allOf();
}
public void testWithMissingTestAnnotation() {}
}
private abstract static class RequirementsProviderProxy
implements RequirementsProvider
{
public void testWithMissingTestAnnotationInProxy() {}
}
private static class TestingInterfaceWithTestProxy
implements TestingInterfaceWithTest {}
private interface TestingInterfaceWithTest
{
@Test
default void testInInterface() {}
}
private interface TestingInterface
{
default void methodInInterface() {}
}
private static class TestingTestWithSuppressedPublicMethod
{
@Test
public void test() {}
@ReportUnannotatedMethods.Suppress
public void method() {}
}
private static class TestingTestWithSuppressedPublicMethodInInterface
implements InterfaceWithSuppressedPublicMethod
{
@Test
public void test() {}
}
private interface InterfaceWithSuppressedPublicMethod
{
@ReportUnannotatedMethods.Suppress
default void method() {}
}
}
| presto-testng-services/src/test/java/io/prestosql/testng/services/TestReportUnannotatedMethods.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.testng.services;
import io.prestosql.tempto.Requirement;
import io.prestosql.tempto.Requirements;
import io.prestosql.tempto.RequirementsProvider;
import io.prestosql.tempto.configuration.Configuration;
import io.prestosql.tempto.testmarkers.WithName;
import org.testng.annotations.Test;
import java.lang.reflect.Method;
import static io.prestosql.testng.services.ReportUnannotatedMethods.isTemptoClass;
import static org.assertj.core.api.Assertions.assertThat;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public class TestReportUnannotatedMethods
{
private final ReportUnannotatedMethods instance = new ReportUnannotatedMethods();
@Test
public void testTest()
{
assertThat(instance.findUnannotatedTestMethods(TestingTest.class))
.isEmpty();
assertThat(instance.findUnannotatedTestMethods(TestingTestWithProxy.class))
.isEmpty();
}
@Test
public void testTestWithoutTestAnnotation()
{
assertThat(instance.findUnannotatedTestMethods(TestingTestWithoutTestAnnotation.class))
.extracting(Method::getName)
.containsExactly("testWithMissingTestAnnotation", "methodInInterface");
}
@Test
public void testTemptoRequirementsProvider()
{
assertThat(instance.findUnannotatedTestMethods(TestingRequirementsProvider.class))
.extracting(Method::getName)
.containsExactly("testWithMissingTestAnnotation");
assertThat(instance.findUnannotatedTestMethods(TestingRequirementsProviderWithProxyClass.class))
.extracting(Method::getName)
.containsExactly("testWithMissingTestAnnotation", "testWithMissingTestAnnotationInProxy");
}
@Test
public void testTemptoPackage()
{
assertTrue(isTemptoClass(RequirementsProvider.class));
assertTrue(isTemptoClass(WithName.class));
assertFalse(isTemptoClass(getClass()));
}
@Test
public void testSuppressedMethods()
{
assertThat(instance.findUnannotatedTestMethods(TestingTestWithSuppressedPublicMethod.class))
.isEmpty();
assertThat(instance.findUnannotatedTestMethods(TestingTestWithSuppressedPublicMethodInInterface.class))
.isEmpty();
}
private static class TestingTest
implements TestingInterfaceWithTest
{
@Test
public void test() {}
}
private static class TestingTestWithProxy
extends TestingInterfaceWithTestProxy
{
@Test
public void test() {}
}
private static class TestingTestWithoutTestAnnotation
implements TestingInterface
{
public void testWithMissingTestAnnotation() {}
@Override
public String toString()
{
return "test override";
}
}
private static class TestingRequirementsProvider
implements RequirementsProvider
{
@Override
public Requirement getRequirements(Configuration configuration)
{
return Requirements.allOf();
}
public void testWithMissingTestAnnotation() {}
}
private static class TestingRequirementsProviderWithProxyClass
extends RequirementsProviderProxy
{
@Override
public Requirement getRequirements(Configuration configuration)
{
return Requirements.allOf();
}
public void testWithMissingTestAnnotation() {}
}
private abstract static class RequirementsProviderProxy
implements RequirementsProvider
{
public void testWithMissingTestAnnotationInProxy() {}
}
private static class TestingInterfaceWithTestProxy
implements TestingInterfaceWithTest {}
private interface TestingInterfaceWithTest
{
@Test
default void testInInterface() {}
}
private interface TestingInterface
{
default void methodInInterface() {}
}
private static class TestingTestWithSuppressedPublicMethod
{
@Test
public void test() {}
@ReportUnannotatedMethods.Suppress
public void method() {}
}
private static class TestingTestWithSuppressedPublicMethodInInterface
implements InterfaceWithSuppressedPublicMethod
{
@Test
public void test() {}
}
private interface InterfaceWithSuppressedPublicMethod
{
@ReportUnannotatedMethods.Suppress
default void method() {}
}
}
| Do not call static method via instance
| presto-testng-services/src/test/java/io/prestosql/testng/services/TestReportUnannotatedMethods.java | Do not call static method via instance |
|
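The change in this record drops the ReportUnannotatedMethods field and calls findUnannotatedTestMethods through a static import, matching the commit subject "Do not call static method via instance". A small illustration of the pattern being avoided, with hypothetical names (Finder, scan):

public class StaticCallSketch {
    static class Finder {
        // A static utility, analogous to findUnannotatedTestMethods.
        static String scan(Class<?> clazz) {
            return "scanned " + clazz.getSimpleName();
        }
    }

    public static void main(String[] args) {
        Finder finder = new Finder();
        // Compiles, but misleadingly suggests the result depends on the 'finder' instance.
        System.out.println(finder.scan(String.class));
        // Preferred: call through the class (or a static import), as the updated test does.
        System.out.println(Finder.scan(String.class));
    }
}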
Java | apache-2.0 | addeb30773024799aee0adfb26e331f93913e069 | 0 | PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr | package org.apache.solr.update;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.UpdateRequestExt;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.AdjustableSemaphore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SolrCmdDistributor {
private static final int MAX_RETRIES_ON_FORWARD = 15;
public static Logger log = LoggerFactory.getLogger(SolrCmdDistributor.class);
static AdjustableSemaphore semaphore = new AdjustableSemaphore(8);
CompletionService<Request> completionService;
Set<Future<Request>> pending;
int maxBufferedAddsPerServer = 10;
int maxBufferedDeletesPerServer = 10;
private Response response = new Response();
private final Map<Node,List<AddRequest>> adds = new HashMap<Node,List<AddRequest>>();
private final Map<Node,List<DeleteRequest>> deletes = new HashMap<Node,List<DeleteRequest>>();
private UpdateShardHandler updateShardHandler;
class AddRequest {
AddUpdateCommand cmd;
ModifiableSolrParams params;
}
class DeleteRequest {
DeleteUpdateCommand cmd;
ModifiableSolrParams params;
}
public static interface AbortCheck {
public boolean abortCheck();
}
public SolrCmdDistributor(int numHosts, UpdateShardHandler updateShardHandler) {
int maxPermits = Math.max(16, numHosts * 16);
// limits how many tasks can actually execute at once
if (maxPermits != semaphore.getMaxPermits()) {
semaphore.setMaxPermits(maxPermits);
}
this.updateShardHandler = updateShardHandler;
completionService = new ExecutorCompletionService<Request>(updateShardHandler.getCmdDistribExecutor());
pending = new HashSet<Future<Request>>();
}
public void finish() {
flushAdds(1);
flushDeletes(1);
checkResponses(true);
}
public void distribDelete(DeleteUpdateCommand cmd, List<Node> urls, ModifiableSolrParams params) throws IOException {
checkResponses(false);
if (cmd.isDeleteById()) {
doDelete(cmd, urls, params);
} else {
doDelete(cmd, urls, params);
}
}
public void distribAdd(AddUpdateCommand cmd, List<Node> nodes, ModifiableSolrParams params) throws IOException {
checkResponses(false);
// make sure any pending deletes are flushed
flushDeletes(1);
// TODO: this is brittle
// need to make a clone since these commands may be reused
AddUpdateCommand clone = new AddUpdateCommand(null);
clone.solrDoc = cmd.solrDoc;
clone.commitWithin = cmd.commitWithin;
clone.overwrite = cmd.overwrite;
clone.setVersion(cmd.getVersion());
AddRequest addRequest = new AddRequest();
addRequest.cmd = clone;
addRequest.params = params;
for (Node node : nodes) {
List<AddRequest> alist = adds.get(node);
if (alist == null) {
alist = new ArrayList<AddRequest>(2);
adds.put(node, alist);
}
alist.add(addRequest);
}
flushAdds(maxBufferedAddsPerServer);
}
public void distribCommit(CommitUpdateCommand cmd, List<Node> nodes,
ModifiableSolrParams params) throws IOException {
// make sure we are ordered
flushAdds(1);
flushDeletes(1);
// Wait for all outstanding responses to make sure that a commit
// can't sneak in ahead of adds or deletes we already sent.
// We could do this on a per-server basis, but it's more complex
// and this solution will lead to commits happening closer together.
checkResponses(true);
// currently, we dont try to piggy back on outstanding adds or deletes
UpdateRequestExt ureq = new UpdateRequestExt();
ureq.setParams(params);
addCommit(ureq, cmd);
log.info("Distrib commit to:" + nodes + " params:" + params);
for (Node node : nodes) {
submit(ureq, node);
}
// if the command wanted to block until everything was committed,
// then do that here.
if (cmd.waitSearcher) {
checkResponses(true);
}
}
private void doDelete(DeleteUpdateCommand cmd, List<Node> nodes,
ModifiableSolrParams params) {
flushAdds(1);
DeleteUpdateCommand clonedCmd = clone(cmd);
DeleteRequest deleteRequest = new DeleteRequest();
deleteRequest.cmd = clonedCmd;
deleteRequest.params = params;
for (Node node : nodes) {
List<DeleteRequest> dlist = deletes.get(node);
if (dlist == null) {
dlist = new ArrayList<DeleteRequest>(2);
deletes.put(node, dlist);
}
dlist.add(deleteRequest);
}
flushDeletes(maxBufferedDeletesPerServer);
}
void addCommit(UpdateRequestExt ureq, CommitUpdateCommand cmd) {
if (cmd == null) return;
ureq.setAction(cmd.optimize ? AbstractUpdateRequest.ACTION.OPTIMIZE
: AbstractUpdateRequest.ACTION.COMMIT, false, cmd.waitSearcher, cmd.maxOptimizeSegments, cmd.softCommit, cmd.expungeDeletes);
}
boolean flushAdds(int limit) {
// check for pending deletes
Set<Node> removeNodes = new HashSet<Node>();
Set<Node> nodes = adds.keySet();
for (Node node : nodes) {
List<AddRequest> alist = adds.get(node);
if (alist == null || alist.size() < limit) continue;
UpdateRequestExt ureq = new UpdateRequestExt();
ModifiableSolrParams combinedParams = new ModifiableSolrParams();
for (AddRequest aReq : alist) {
AddUpdateCommand cmd = aReq.cmd;
combinedParams.add(aReq.params);
ureq.add(cmd.solrDoc, cmd.commitWithin, cmd.overwrite);
}
if (ureq.getParams() == null) ureq.setParams(new ModifiableSolrParams());
ureq.getParams().add(combinedParams);
removeNodes.add(node);
submit(ureq, node);
}
for (Node node : removeNodes) {
adds.remove(node);
}
return true;
}
boolean flushDeletes(int limit) {
// check for pending deletes
Set<Node> removeNodes = new HashSet<Node>();
Set<Node> nodes = deletes.keySet();
for (Node node : nodes) {
List<DeleteRequest> dlist = deletes.get(node);
if (dlist == null || dlist.size() < limit) continue;
UpdateRequestExt ureq = new UpdateRequestExt();
ModifiableSolrParams combinedParams = new ModifiableSolrParams();
for (DeleteRequest dReq : dlist) {
DeleteUpdateCommand cmd = dReq.cmd;
combinedParams.add(dReq.params);
if (cmd.isDeleteById()) {
ureq.deleteById(cmd.getId(), cmd.getVersion());
} else {
ureq.deleteByQuery(cmd.query);
}
if (ureq.getParams() == null) ureq
.setParams(new ModifiableSolrParams());
ureq.getParams().add(combinedParams);
}
removeNodes.add(node);
submit(ureq, node);
}
for (Node node : removeNodes) {
deletes.remove(node);
}
return true;
}
private DeleteUpdateCommand clone(DeleteUpdateCommand cmd) {
DeleteUpdateCommand c = (DeleteUpdateCommand)cmd.clone();
// TODO: shouldnt the clone do this?
c.setFlags(cmd.getFlags());
c.setVersion(cmd.getVersion());
return c;
}
public static class Request {
public Node node;
UpdateRequestExt ureq;
NamedList<Object> ursp;
int rspCode;
public Exception exception;
int retries;
}
void submit(UpdateRequestExt ureq, Node node) {
Request sreq = new Request();
sreq.node = node;
sreq.ureq = ureq;
submit(sreq);
}
public void submit(final Request sreq) {
final String url = sreq.node.getUrl();
Callable<Request> task = new Callable<Request>() {
@Override
public Request call() throws Exception {
Request clonedRequest = null;
try {
clonedRequest = new Request();
clonedRequest.node = sreq.node;
clonedRequest.ureq = sreq.ureq;
clonedRequest.retries = sreq.retries;
String fullUrl;
if (!url.startsWith("http://") && !url.startsWith("https://")) {
fullUrl = "http://" + url;
} else {
fullUrl = url;
}
HttpSolrServer server = new HttpSolrServer(fullUrl,
updateShardHandler.getHttpClient());
if (Thread.currentThread().isInterrupted()) {
clonedRequest.rspCode = 503;
clonedRequest.exception = new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Shutting down.");
return clonedRequest;
}
clonedRequest.ursp = server.request(clonedRequest.ureq);
// currently no way to get the request body.
} catch (Exception e) {
clonedRequest.exception = e;
if (e instanceof SolrException) {
clonedRequest.rspCode = ((SolrException) e).code();
} else {
clonedRequest.rspCode = -1;
}
} finally {
semaphore.release();
}
return clonedRequest;
}
};
try {
semaphore.acquire();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Update thread interrupted", e);
}
try {
pending.add(completionService.submit(task));
} catch (RejectedExecutionException e) {
semaphore.release();
throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Shutting down", e);
}
}
void checkResponses(boolean block) {
while (pending != null && pending.size() > 0) {
try {
Future<Request> future = block ? completionService.take()
: completionService.poll();
if (future == null) return;
pending.remove(future);
try {
Request sreq = future.get();
if (sreq.rspCode != 0) {
// error during request
// if there is a retry url, we want to retry...
boolean isRetry = sreq.node.checkRetry();
boolean doRetry = false;
int rspCode = sreq.rspCode;
// this can happen in certain situations such as shutdown
if (isRetry) {
if (rspCode == 404 || rspCode == 403 || rspCode == 503
|| rspCode == 500) {
doRetry = true;
}
// if its an ioexception, lets try again
if (sreq.exception instanceof IOException) {
doRetry = true;
} else if (sreq.exception instanceof SolrServerException) {
if (((SolrServerException) sreq.exception).getRootCause() instanceof IOException) {
doRetry = true;
}
}
}
if (isRetry && sreq.retries < MAX_RETRIES_ON_FORWARD && doRetry) {
sreq.retries++;
sreq.rspCode = 0;
sreq.exception = null;
SolrException.log(SolrCmdDistributor.log, "forwarding update to " + sreq.node.getUrl() + " failed - retrying ... ");
Thread.sleep(500);
submit(sreq);
} else {
Exception e = sreq.exception;
Error error = new Error();
error.e = e;
error.node = sreq.node;
response.errors.add(error);
response.sreq = sreq;
SolrException.log(SolrCmdDistributor.log, "shard update error "
+ sreq.node, sreq.exception);
}
}
} catch (ExecutionException e) {
// shouldn't happen since we catch exceptions ourselves
SolrException.log(SolrCore.log,
"error sending update request to shard", e);
}
} catch (InterruptedException e) {
throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE,
"interrupted waiting for shard update response", e);
}
}
}
public static class Response {
public Request sreq;
public List<Error> errors = new ArrayList<Error>();
}
public static class Error {
public Node node;
public Exception e;
}
public Response getResponse() {
return response;
}
public static abstract class Node {
public abstract String getUrl();
public abstract boolean checkRetry();
public abstract String getCoreName();
public abstract String getBaseUrl();
public abstract ZkCoreNodeProps getNodeProps();
}
public static class StdNode extends Node {
protected String url;
protected String baseUrl;
protected String coreName;
private ZkCoreNodeProps nodeProps;
public StdNode(ZkCoreNodeProps nodeProps) {
this.url = nodeProps.getCoreUrl();
this.baseUrl = nodeProps.getBaseUrl();
this.coreName = nodeProps.getCoreName();
this.nodeProps = nodeProps;
}
@Override
public String getUrl() {
return url;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + ": " + url;
}
@Override
public boolean checkRetry() {
return false;
}
@Override
public String getBaseUrl() {
return baseUrl;
}
@Override
public String getCoreName() {
return coreName;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((baseUrl == null) ? 0 : baseUrl.hashCode());
result = prime * result + ((coreName == null) ? 0 : coreName.hashCode());
result = prime * result + ((url == null) ? 0 : url.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
StdNode other = (StdNode) obj;
if (baseUrl == null) {
if (other.baseUrl != null) return false;
} else if (!baseUrl.equals(other.baseUrl)) return false;
if (coreName == null) {
if (other.coreName != null) return false;
} else if (!coreName.equals(other.coreName)) return false;
if (url == null) {
if (other.url != null) return false;
} else if (!url.equals(other.url)) return false;
return true;
}
@Override
public ZkCoreNodeProps getNodeProps() {
return nodeProps;
}
}
}
| solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java | package org.apache.solr.update;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.UpdateRequestExt;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.AdjustableSemaphore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SolrCmdDistributor {
private static final int MAX_RETRIES_ON_FORWARD = 15;
public static Logger log = LoggerFactory.getLogger(SolrCmdDistributor.class);
static AdjustableSemaphore semaphore = new AdjustableSemaphore(8);
CompletionService<Request> completionService;
Set<Future<Request>> pending;
int maxBufferedAddsPerServer = 10;
int maxBufferedDeletesPerServer = 10;
private Response response = new Response();
private final Map<Node,List<AddRequest>> adds = new HashMap<Node,List<AddRequest>>();
private final Map<Node,List<DeleteRequest>> deletes = new HashMap<Node,List<DeleteRequest>>();
private UpdateShardHandler updateShardHandler;
class AddRequest {
AddUpdateCommand cmd;
ModifiableSolrParams params;
}
class DeleteRequest {
DeleteUpdateCommand cmd;
ModifiableSolrParams params;
}
public static interface AbortCheck {
public boolean abortCheck();
}
public SolrCmdDistributor(int numHosts, UpdateShardHandler updateShardHandler) {
int maxPermits = Math.max(16, numHosts * 16);
// limits how many tasks can actually execute at once
if (maxPermits != semaphore.getMaxPermits()) {
semaphore.setMaxPermits(maxPermits);
}
this.updateShardHandler = updateShardHandler;
completionService = new ExecutorCompletionService<Request>(updateShardHandler.getCmdDistribExecutor());
pending = new HashSet<Future<Request>>();
}
public void finish() {
flushAdds(1);
flushDeletes(1);
checkResponses(true);
}
public void distribDelete(DeleteUpdateCommand cmd, List<Node> urls, ModifiableSolrParams params) throws IOException {
checkResponses(false);
if (cmd.isDeleteById()) {
doDelete(cmd, urls, params);
} else {
doDelete(cmd, urls, params);
}
}
public void distribAdd(AddUpdateCommand cmd, List<Node> nodes, ModifiableSolrParams params) throws IOException {
checkResponses(false);
// make sure any pending deletes are flushed
flushDeletes(1);
// TODO: this is brittle
// need to make a clone since these commands may be reused
AddUpdateCommand clone = new AddUpdateCommand(null);
clone.solrDoc = cmd.solrDoc;
clone.commitWithin = cmd.commitWithin;
clone.overwrite = cmd.overwrite;
clone.setVersion(cmd.getVersion());
AddRequest addRequest = new AddRequest();
addRequest.cmd = clone;
addRequest.params = params;
for (Node node : nodes) {
List<AddRequest> alist = adds.get(node);
if (alist == null) {
alist = new ArrayList<AddRequest>(2);
adds.put(node, alist);
}
alist.add(addRequest);
}
flushAdds(maxBufferedAddsPerServer);
}
public void distribCommit(CommitUpdateCommand cmd, List<Node> nodes,
ModifiableSolrParams params) throws IOException {
// make sure we are ordered
flushAdds(1);
flushDeletes(1);
// Wait for all outstanding responses to make sure that a commit
// can't sneak in ahead of adds or deletes we already sent.
// We could do this on a per-server basis, but it's more complex
// and this solution will lead to commits happening closer together.
checkResponses(true);
// currently, we dont try to piggy back on outstanding adds or deletes
UpdateRequestExt ureq = new UpdateRequestExt();
ureq.setParams(params);
addCommit(ureq, cmd);
log.info("Distrib commit to:" + nodes + " params:" + params);
for (Node node : nodes) {
submit(ureq, node);
}
// if the command wanted to block until everything was committed,
// then do that here.
if (cmd.waitSearcher) {
checkResponses(true);
}
}
private void doDelete(DeleteUpdateCommand cmd, List<Node> nodes,
ModifiableSolrParams params) {
flushAdds(1);
DeleteUpdateCommand clonedCmd = clone(cmd);
DeleteRequest deleteRequest = new DeleteRequest();
deleteRequest.cmd = clonedCmd;
deleteRequest.params = params;
for (Node node : nodes) {
List<DeleteRequest> dlist = deletes.get(node);
if (dlist == null) {
dlist = new ArrayList<DeleteRequest>(2);
deletes.put(node, dlist);
}
dlist.add(deleteRequest);
}
flushDeletes(maxBufferedDeletesPerServer);
}
void addCommit(UpdateRequestExt ureq, CommitUpdateCommand cmd) {
if (cmd == null) return;
ureq.setAction(cmd.optimize ? AbstractUpdateRequest.ACTION.OPTIMIZE
: AbstractUpdateRequest.ACTION.COMMIT, false, cmd.waitSearcher, cmd.maxOptimizeSegments, cmd.softCommit, cmd.expungeDeletes);
}
boolean flushAdds(int limit) {
// check for pending deletes
Set<Node> removeNodes = new HashSet<Node>();
Set<Node> nodes = adds.keySet();
for (Node node : nodes) {
List<AddRequest> alist = adds.get(node);
if (alist == null || alist.size() < limit) continue;
UpdateRequestExt ureq = new UpdateRequestExt();
ModifiableSolrParams combinedParams = new ModifiableSolrParams();
for (AddRequest aReq : alist) {
AddUpdateCommand cmd = aReq.cmd;
combinedParams.add(aReq.params);
ureq.add(cmd.solrDoc, cmd.commitWithin, cmd.overwrite);
}
if (ureq.getParams() == null) ureq.setParams(new ModifiableSolrParams());
ureq.getParams().add(combinedParams);
removeNodes.add(node);
submit(ureq, node);
}
for (Node node : removeNodes) {
adds.remove(node);
}
return true;
}
boolean flushDeletes(int limit) {
// check for pending deletes
Set<Node> removeNodes = new HashSet<Node>();
Set<Node> nodes = deletes.keySet();
for (Node node : nodes) {
List<DeleteRequest> dlist = deletes.get(node);
if (dlist == null || dlist.size() < limit) continue;
UpdateRequestExt ureq = new UpdateRequestExt();
ModifiableSolrParams combinedParams = new ModifiableSolrParams();
for (DeleteRequest dReq : dlist) {
DeleteUpdateCommand cmd = dReq.cmd;
combinedParams.add(dReq.params);
if (cmd.isDeleteById()) {
ureq.deleteById(cmd.getId(), cmd.getVersion());
} else {
ureq.deleteByQuery(cmd.query);
}
if (ureq.getParams() == null) ureq
.setParams(new ModifiableSolrParams());
ureq.getParams().add(combinedParams);
}
removeNodes.add(node);
submit(ureq, node);
}
for (Node node : removeNodes) {
deletes.remove(node);
}
return true;
}
private DeleteUpdateCommand clone(DeleteUpdateCommand cmd) {
DeleteUpdateCommand c = (DeleteUpdateCommand)cmd.clone();
// TODO: shouldnt the clone do this?
c.setFlags(cmd.getFlags());
c.setVersion(cmd.getVersion());
return c;
}
public static class Request {
public Node node;
UpdateRequestExt ureq;
NamedList<Object> ursp;
int rspCode;
public Exception exception;
int retries;
}
void submit(UpdateRequestExt ureq, Node node) {
Request sreq = new Request();
sreq.node = node;
sreq.ureq = ureq;
submit(sreq);
}
public void submit(final Request sreq) {
final String url = sreq.node.getUrl();
Callable<Request> task = new Callable<Request>() {
@Override
public Request call() throws Exception {
Request clonedRequest = null;
try {
clonedRequest = new Request();
clonedRequest.node = sreq.node;
clonedRequest.ureq = sreq.ureq;
clonedRequest.retries = sreq.retries;
String fullUrl;
if (!url.startsWith("http://") && !url.startsWith("https://")) {
fullUrl = "http://" + url;
} else {
fullUrl = url;
}
HttpSolrServer server = new HttpSolrServer(fullUrl,
updateShardHandler.getHttpClient());
if (Thread.currentThread().isInterrupted()) {
clonedRequest.rspCode = 503;
clonedRequest.exception = new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Shutting down.");
return clonedRequest;
}
clonedRequest.ursp = server.request(clonedRequest.ureq);
// currently no way to get the request body.
} catch (Exception e) {
clonedRequest.exception = e;
if (e instanceof SolrException) {
clonedRequest.rspCode = ((SolrException) e).code();
} else {
clonedRequest.rspCode = -1;
}
} finally {
semaphore.release();
}
return clonedRequest;
}
};
try {
semaphore.acquire();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Update thread interrupted", e);
}
try {
pending.add(completionService.submit(task));
} catch (RejectedExecutionException e) {
semaphore.release();
throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Shutting down", e);
}
}
void checkResponses(boolean block) {
while (pending != null && pending.size() > 0) {
try {
Future<Request> future = block ? completionService.take()
: completionService.poll();
if (future == null) return;
pending.remove(future);
try {
Request sreq = future.get();
if (sreq.rspCode != 0) {
// error during request
// if there is a retry url, we want to retry...
boolean isRetry = sreq.node.checkRetry();
boolean doRetry = false;
int rspCode = sreq.rspCode;
// this can happen in certain situations such as shutdown
if (isRetry) {
if (rspCode == 404 || rspCode == 403 || rspCode == 503
|| rspCode == 500) {
doRetry = true;
}
// if its an ioexception, lets try again
if (sreq.exception instanceof IOException) {
doRetry = true;
} else if (sreq.exception instanceof SolrServerException) {
if (((SolrServerException) sreq.exception).getRootCause() instanceof IOException) {
doRetry = true;
}
}
}
if (isRetry && sreq.retries < MAX_RETRIES_ON_FORWARD && doRetry) {
sreq.retries++;
sreq.rspCode = 0;
sreq.exception = null;
SolrException.log(SolrCmdDistributor.log, "forwarding update to " + sreq.node.getUrl() + " failed - retrying ... ");
Thread.sleep(500);
submit(sreq);
checkResponses(block);
} else {
Exception e = sreq.exception;
Error error = new Error();
error.e = e;
error.node = sreq.node;
response.errors.add(error);
response.sreq = sreq;
SolrException.log(SolrCmdDistributor.log, "shard update error "
+ sreq.node, sreq.exception);
}
}
} catch (ExecutionException e) {
// shouldn't happen since we catch exceptions ourselves
SolrException.log(SolrCore.log,
"error sending update request to shard", e);
}
} catch (InterruptedException e) {
throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE,
"interrupted waiting for shard update response", e);
}
}
}
public static class Response {
public Request sreq;
public List<Error> errors = new ArrayList<Error>();
}
public static class Error {
public Node node;
public Exception e;
}
public Response getResponse() {
return response;
}
public static abstract class Node {
public abstract String getUrl();
public abstract boolean checkRetry();
public abstract String getCoreName();
public abstract String getBaseUrl();
public abstract ZkCoreNodeProps getNodeProps();
}
public static class StdNode extends Node {
protected String url;
protected String baseUrl;
protected String coreName;
private ZkCoreNodeProps nodeProps;
public StdNode(ZkCoreNodeProps nodeProps) {
this.url = nodeProps.getCoreUrl();
this.baseUrl = nodeProps.getBaseUrl();
this.coreName = nodeProps.getCoreName();
this.nodeProps = nodeProps;
}
@Override
public String getUrl() {
return url;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + ": " + url;
}
@Override
public boolean checkRetry() {
return false;
}
@Override
public String getBaseUrl() {
return baseUrl;
}
@Override
public String getCoreName() {
return coreName;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((baseUrl == null) ? 0 : baseUrl.hashCode());
result = prime * result + ((coreName == null) ? 0 : coreName.hashCode());
result = prime * result + ((url == null) ? 0 : url.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
StdNode other = (StdNode) obj;
if (baseUrl == null) {
if (other.baseUrl != null) return false;
} else if (!baseUrl.equals(other.baseUrl)) return false;
if (coreName == null) {
if (other.coreName != null) return false;
} else if (!coreName.equals(other.coreName)) return false;
if (url == null) {
if (other.url != null) return false;
} else if (!url.equals(other.url)) return false;
return true;
}
@Override
public ZkCoreNodeProps getNodeProps() {
return nodeProps;
}
}
}
| SOLR-3180: remove bad method call
git-svn-id: 13f9c63152c129021c7e766f4ef575faaaa595a2@1427590 13f79535-47bb-0310-9956-ffa450edef68
| solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java | SOLR-3180: remove bad method call |
|
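The functional difference between the two versions above is in checkResponses: the old code re-entered checkResponses(block) immediately after resubmitting a failed request, while the new code relies on the enclosing while (pending ...) loop to pick the resubmitted request up on a later iteration. A reduced, self-contained sketch of that loop shape, where queue entries stand in for pending futures and the failure condition is simulated:

import java.util.ArrayDeque;
import java.util.Deque;

public class RetryLoopSketch {
    public static void main(String[] args) {
        Deque<Integer> pending = new ArrayDeque<>();   // each entry: retry count of one request
        pending.add(0);
        while (!pending.isEmpty()) {                   // the loop itself keeps draining responses
            int retries = pending.poll();
            boolean failed = retries < 2;              // simulate two failures, then success
            if (failed && retries < 15) {              // 15 mirrors MAX_RETRIES_ON_FORWARD
                pending.add(retries + 1);              // resubmit; no recursive checkResponses call
            }
        }
        System.out.println("all pending requests completed");
    }
}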
Java | apache-2.0 | e3090ce29d99f66d1378dee1629c7e256d99b465 | 0 | autermann/iceland,nuest/iceland,CarstenHollmann/iceland,autermann/iceland,nuest/iceland | /*
* Copyright 2015 52°North Initiative for Geospatial Open Source
* Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.n52.iceland.ogc.ows;
import com.google.common.base.Splitter;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.ADDRESS;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.CITY;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.COUNTRY;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.EMAIL;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.FILE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.INDIVIDUAL_NAME;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.NAME;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.PHONE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.POSITION_NAME;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.POSTAL_CODE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.SITE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.STATE;
import java.io.File;
import java.net.URI;
import java.util.Collections;
import java.util.Iterator;
import java.util.Locale;
import java.util.Optional;
import java.util.Set;
import org.n52.iceland.config.annotation.Configurable;
import org.n52.iceland.config.annotation.Setting;
import org.n52.iceland.exception.ConfigurationError;
import org.n52.iceland.exception.ows.OwsExceptionReport;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.CONTACT_INSTRUCTIONS;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.FACSIMILE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.HOURS_OF_SERVICE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.ONLINE_RESOURCE;
import org.n52.iceland.util.FileIOHelper;
import org.n52.iceland.util.LocalizedLazyThreadSafeProducer;
import org.n52.iceland.util.StringHelper;
/**
* @author Christian Autermann <[email protected]>
*
* @since 1.0.0
*/
@Configurable
public class ServiceProviderFactory extends LocalizedLazyThreadSafeProducer<OwsServiceProvider> {
private File file;
private String name;
private URI site;
private String individualName;
private String positionName;
private String phone;
private String deliveryPoint;
private String city;
private String postalCode;
private String country;
private String electronicMailAddress;
private String administrativeArea;
private String facsimile;
private String hoursOfService;
private String contactInstructions;
private String onlineResoureTitle;
private String onlineResoureHref;
@Setting(FILE)
public void setFile(File file) {
this.file = file;
setRecreate();
}
@Setting(NAME)
public void setName(String name) throws ConfigurationError {
this.name = name;
setRecreate();
}
@Setting(SITE)
public void setSite(URI site) {
this.site = site;
setRecreate();
}
@Setting(INDIVIDUAL_NAME)
public void setIndividualName(String individualName) {
this.individualName = individualName;
setRecreate();
}
@Setting(POSITION_NAME)
public void setPositionName(String positionName) {
this.positionName = positionName;
setRecreate();
}
@Setting(PHONE)
public void setPhone(String phone) {
this.phone = phone;
setRecreate();
}
@Setting(FACSIMILE)
public void setFacsimile(String facsimile) {
this.facsimile = facsimile;
setRecreate();
}
@Setting(ADDRESS)
public void setDeliveryPoint(String deliveryPoint) {
this.deliveryPoint = deliveryPoint;
setRecreate();
}
@Setting(CITY)
public void setCity(String city) {
this.city = city;
setRecreate();
}
@Setting(POSTAL_CODE)
public void setPostalCode(String postalCode) {
this.postalCode = postalCode;
setRecreate();
}
@Setting(COUNTRY)
public void setCountry(String country) {
this.country = country;
setRecreate();
}
@Setting(EMAIL)
public void setMailAddress(String mailAddress) {
this.electronicMailAddress = mailAddress;
setRecreate();
}
@Setting(STATE)
public void setAdministrativeArea(String administrativeArea) {
this.administrativeArea = administrativeArea;
setRecreate();
}
@Setting(HOURS_OF_SERVICE)
public void setHours(String hours) {
this.hoursOfService = hours;
setRecreate();
}
@Setting(CONTACT_INSTRUCTIONS)
public void setContactInstructions(String contactInstructions) {
this.contactInstructions = contactInstructions;
setRecreate();
}
@Setting(ONLINE_RESOURCE)
public void setOnlineResource(String onlineResource) {
if (Optional.ofNullable(onlineResource).isPresent()) {
Iterable<String> split = Splitter.on("|").trimResults().split(onlineResource);
Iterator<String> iterator = split.iterator();
this.onlineResoureTitle = iterator.next();
this.onlineResoureHref = iterator.next();
setRecreate();
}
}
@Override
protected OwsServiceProvider create(Locale language) throws ConfigurationError {
OwsServiceProvider serviceProvider = new OwsServiceProvider();
if (this.file != null) {
createFromFile(serviceProvider);
} else {
createFromSettings(serviceProvider);
}
return serviceProvider;
}
private void createFromSettings(OwsServiceProvider serviceProvider) {
serviceProvider.setAdministrativeArea(this.administrativeArea);
serviceProvider.setCity(this.city);
serviceProvider.setContactInstructions(this.contactInstructions);
serviceProvider.setCountry(this.country);
serviceProvider.setDeliveryPoint(this.deliveryPoint);
serviceProvider.setFacsimile(this.facsimile);
serviceProvider.setHoursOfService(this.hoursOfService);
serviceProvider.setIndividualName(this.individualName);
serviceProvider.setElectronicMailAddress(this.electronicMailAddress);
serviceProvider.setName(this.name);
serviceProvider.setOnlineResourceHref(this.onlineResoureHref);
serviceProvider.setOnlineResourceTitle(this.onlineResoureTitle);
serviceProvider.setPhone(this.phone);
serviceProvider.setPositionName(this.positionName);
serviceProvider.setPostalCode(this.postalCode);
serviceProvider.setSite(this.site == null ? null : this.site.toString());
}
private void createFromFile(OwsServiceProvider serviceProvider)
throws ConfigurationError {
try {
serviceProvider.setServiceProvider(StringHelper.convertStreamToString(FileIOHelper.loadInputStreamFromFile(this.file)));
} catch (OwsExceptionReport ex) {
throw new ConfigurationError(ex);
}
}
@Override
public Set<Locale> getAvailableLocales() {
return Collections.emptySet();
}
}
| src/main/java/org/n52/iceland/ogc/ows/ServiceProviderFactory.java | /*
* Copyright 2015 52°North Initiative for Geospatial Open Source
* Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.n52.iceland.ogc.ows;
import com.google.common.base.Splitter;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.ADDRESS;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.CITY;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.COUNTRY;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.EMAIL;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.FILE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.INDIVIDUAL_NAME;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.NAME;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.PHONE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.POSITION_NAME;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.POSTAL_CODE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.SITE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.STATE;
import java.io.File;
import java.net.URI;
import java.util.Collections;
import java.util.Iterator;
import java.util.Locale;
import java.util.Set;
import org.n52.iceland.config.annotation.Configurable;
import org.n52.iceland.config.annotation.Setting;
import org.n52.iceland.exception.ConfigurationError;
import org.n52.iceland.exception.ows.OwsExceptionReport;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.CONTACT_INSTRUCTIONS;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.FACSIMILE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.HOURS_OF_SERVICE;
import static org.n52.iceland.ogc.ows.ServiceProviderFactorySettings.ONLINE_RESOURCE;
import org.n52.iceland.util.FileIOHelper;
import org.n52.iceland.util.LocalizedLazyThreadSafeProducer;
import org.n52.iceland.util.StringHelper;
/**
* @author Christian Autermann <[email protected]>
*
* @since 1.0.0
*/
@Configurable
public class ServiceProviderFactory extends LocalizedLazyThreadSafeProducer<OwsServiceProvider> {
private File file;
private String name;
private URI site;
private String individualName;
private String positionName;
private String phone;
private String deliveryPoint;
private String city;
private String postalCode;
private String country;
private String electronicMailAddress;
private String administrativeArea;
private String facsimile;
private String hoursOfService;
private String contactInstructions;
private String onlineResoureTitle;
private String onlineResoureHref;
@Setting(FILE)
public void setFile(File file) {
this.file = file;
setRecreate();
}
@Setting(NAME)
public void setName(String name) throws ConfigurationError {
this.name = name;
setRecreate();
}
@Setting(SITE)
public void setSite(URI site) {
this.site = site;
setRecreate();
}
@Setting(INDIVIDUAL_NAME)
public void setIndividualName(String individualName) {
this.individualName = individualName;
setRecreate();
}
@Setting(POSITION_NAME)
public void setPositionName(String positionName) {
this.positionName = positionName;
setRecreate();
}
@Setting(PHONE)
public void setPhone(String phone) {
this.phone = phone;
setRecreate();
}
@Setting(FACSIMILE)
public void setFacsimile(String facsimile) {
this.facsimile = facsimile;
setRecreate();
}
@Setting(ADDRESS)
public void setDeliveryPoint(String deliveryPoint) {
this.deliveryPoint = deliveryPoint;
setRecreate();
}
@Setting(CITY)
public void setCity(String city) {
this.city = city;
setRecreate();
}
@Setting(POSTAL_CODE)
public void setPostalCode(String postalCode) {
this.postalCode = postalCode;
setRecreate();
}
@Setting(COUNTRY)
public void setCountry(String country) {
this.country = country;
setRecreate();
}
@Setting(EMAIL)
public void setMailAddress(String mailAddress) {
this.electronicMailAddress = mailAddress;
setRecreate();
}
@Setting(STATE)
public void setAdministrativeArea(String administrativeArea) {
this.administrativeArea = administrativeArea;
setRecreate();
}
@Setting(HOURS_OF_SERVICE)
public void setHours(String hours) {
this.hoursOfService = hours;
setRecreate();
}
@Setting(CONTACT_INSTRUCTIONS)
public void setContactInstructions(String contactInstructions) {
this.contactInstructions = contactInstructions;
setRecreate();
}
@Setting(ONLINE_RESOURCE)
public void setOnlineResource(String onlineResource) {
Iterable<String> split = Splitter.on("|").trimResults().split(onlineResource);
Iterator<String> iterator = split.iterator();
this.onlineResoureTitle = iterator.next();
this.onlineResoureHref = iterator.next();
setRecreate();
}
@Override
protected OwsServiceProvider create(Locale language) throws ConfigurationError {
OwsServiceProvider serviceProvider = new OwsServiceProvider();
if (this.file != null) {
createFromFile(serviceProvider);
} else {
createFromSettings(serviceProvider);
}
return serviceProvider;
}
private void createFromSettings(OwsServiceProvider serviceProvider) {
serviceProvider.setAdministrativeArea(this.administrativeArea);
serviceProvider.setCity(this.city);
serviceProvider.setContactInstructions(this.contactInstructions);
serviceProvider.setCountry(this.country);
serviceProvider.setDeliveryPoint(this.deliveryPoint);
serviceProvider.setFacsimile(this.facsimile);
serviceProvider.setHoursOfService(this.hoursOfService);
serviceProvider.setIndividualName(this.individualName);
serviceProvider.setElectronicMailAddress(this.electronicMailAddress);
serviceProvider.setName(this.name);
serviceProvider.setOnlineResourceHref(this.onlineResoureHref);
serviceProvider.setOnlineResourceTitle(this.onlineResoureTitle);
serviceProvider.setPhone(this.phone);
serviceProvider.setPositionName(this.positionName);
serviceProvider.setPostalCode(this.postalCode);
serviceProvider.setSite(this.site == null ? null : this.site.toString());
}
private void createFromFile(OwsServiceProvider serviceProvider)
throws ConfigurationError {
try {
serviceProvider.setServiceProvider(StringHelper.convertStreamToString(FileIOHelper.loadInputStreamFromFile(this.file)));
} catch (OwsExceptionReport ex) {
throw new ConfigurationError(ex);
}
}
@Override
public Set<Locale> getAvailableLocales() {
return Collections.emptySet();
}
}
| bugfix: handle missing setting
| src/main/java/org/n52/iceland/ogc/ows/ServiceProviderFactory.java | bugfix: handle missing setting |
|
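The "bugfix: handle missing setting" change above wraps setOnlineResource in a null check: the old version split the value unconditionally, so a missing ONLINE_RESOURCE setting failed with a NullPointerException inside the splitter, while the new version simply skips parsing. A minimal sketch of the guard, with the Guava Splitter reduced to String.split for brevity:

import java.util.Optional;

public class MissingSettingSketch {
    static String title;
    static String href;

    static void setOnlineResource(String onlineResource) {
        // Parse only when the setting is actually present, as in the fixed ServiceProviderFactory.
        if (Optional.ofNullable(onlineResource).isPresent()) {
            String[] parts = onlineResource.split("\\|", 2);
            title = parts[0].trim();
            href = parts[1].trim();
        }
    }

    public static void main(String[] args) {
        setOnlineResource(null);   // previously this path threw; now it is a no-op
        setOnlineResource("52North | http://52north.org");
        System.out.println(title + " -> " + href);
    }
}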
Java | apache-2.0 | ecbaa0a68d89070ccb406bbb0378e72d9392a256 | 0 | apetrucci/katharsis-framework,adnovum/katharsis-framework,katharsis-project/katharsis-framework,iMDT/katharsis-framework-j6,apetrucci/katharsis-framework,apetrucci/katharsis-framework,adnovum/katharsis-framework,apetrucci/katharsis-framework,katharsis-project/katharsis-framework,dustinstanley/katharsis-framework,katharsis-project/katharsis-framework,katharsis-project/katharsis-framework,adnovum/katharsis-framework,adnovum/katharsis-framework,katharsis-project/katharsis-framework,iMDT/katharsis-framework-j6,dustinstanley/katharsis-framework,iMDT/katharsis-framework-j6,apetrucci/katharsis-framework,iMDT/katharsis-framework-j6 | package io.katharsis.rs.jackson;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.katharsis.jackson.ContainerSerializer;
import io.katharsis.jackson.DataLinksContainerSerializer;
import io.katharsis.jackson.ObjectMapperBuilder;
import io.katharsis.jackson.RelationshipContainerSerializer;
import io.katharsis.resource.registry.ResourceRegistry;
import javax.inject.Inject;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
@Provider
public class JsonApiObjectMapperResolver implements ContextResolver<ObjectMapper> {
private ResourceRegistry resourceRegistry;
@Inject
public JsonApiObjectMapperResolver(ResourceRegistry resourceRegistry) {
this.resourceRegistry = resourceRegistry;
}
@Override
public ObjectMapper getContext(Class<?> type) {
ObjectMapperBuilder objectMapperBuilder = new ObjectMapperBuilder();
ObjectMapper objectMapper = objectMapperBuilder.buildWith(new ContainerSerializer(resourceRegistry),
new DataLinksContainerSerializer(resourceRegistry),
new RelationshipContainerSerializer(resourceRegistry));
return objectMapper;
}
}
| src/main/java/io/katharsis/rs/jackson/JsonApiObjectMapperResolver.java | package io.katharsis.rs.jackson;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import io.katharsis.jackson.ContainerSerializer;
import io.katharsis.jackson.LinksContainerSerializer;
import io.katharsis.resource.registry.ResourceRegistry;
import javax.inject.Inject;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Provider;
@Provider
public class JsonApiObjectMapperResolver implements ContextResolver<ObjectMapper> {
private ResourceRegistry resourceRegistry;
@Inject
public JsonApiObjectMapperResolver(ResourceRegistry resourceRegistry) {
this.resourceRegistry = resourceRegistry;
}
@Override
public ObjectMapper getContext(Class<?> type) {
ObjectMapper objectMapper = new ObjectMapper();
SimpleModule simpleModule = new SimpleModule("SimpleModule",
new Version(1, 0, 0, null, null, null));
simpleModule.addSerializer(new ContainerSerializer(resourceRegistry));
simpleModule.addSerializer(new LinksContainerSerializer(resourceRegistry));
objectMapper.registerModule(simpleModule);
return objectMapper;
}
}
| improved serialization
| src/main/java/io/katharsis/rs/jackson/JsonApiObjectMapperResolver.java | improved serialization |
|
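The "improved serialization" commit above replaces the inline SimpleModule wiring with a call to ObjectMapperBuilder.buildWith(...) and registers a third serializer for relationships. The builder itself is not part of this record, so the following is only a sketch, under the assumption that it does roughly what the old inline code did: collect the serializers into one Jackson module and register it on a fresh ObjectMapper.

import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class ObjectMapperBuilderSketch {
    // Each passed serializer must report its handled type via handledType().
    public ObjectMapper buildWith(JsonSerializer<?>... serializers) {
        SimpleModule module = new SimpleModule("JsonApiModule", new Version(1, 0, 0, null, null, null));
        for (JsonSerializer<?> serializer : serializers) {
            module.addSerializer(serializer);
        }
        ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(module);
        return mapper;
    }

    public static void main(String[] args) {
        // Builds a mapper with no extra serializers; real callers pass the JSON API ones.
        System.out.println(new ObjectMapperBuilderSketch().buildWith().version());
    }
}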
Java | apache-2.0 | d49dff6dc2a288b032f1f615d692d20d91096fa7 | 0 | nicolaferraro/camel,tdiesler/camel,ullgren/camel,davidkarlsen/camel,pmoerenhout/camel,nicolaferraro/camel,tadayosi/camel,DariusX/camel,pmoerenhout/camel,CodeSmell/camel,tdiesler/camel,CodeSmell/camel,Fabryprog/camel,adessaigne/camel,CodeSmell/camel,nicolaferraro/camel,tadayosi/camel,alvinkwekel/camel,pmoerenhout/camel,christophd/camel,adessaigne/camel,apache/camel,DariusX/camel,ullgren/camel,pmoerenhout/camel,objectiser/camel,pax95/camel,christophd/camel,pax95/camel,objectiser/camel,Fabryprog/camel,davidkarlsen/camel,pax95/camel,cunningt/camel,cunningt/camel,alvinkwekel/camel,gnodet/camel,zregvart/camel,gnodet/camel,davidkarlsen/camel,objectiser/camel,zregvart/camel,apache/camel,christophd/camel,ullgren/camel,davidkarlsen/camel,nicolaferraro/camel,gnodet/camel,apache/camel,mcollovati/camel,mcollovati/camel,Fabryprog/camel,CodeSmell/camel,adessaigne/camel,cunningt/camel,zregvart/camel,ullgren/camel,nikhilvibhav/camel,adessaigne/camel,mcollovati/camel,apache/camel,pmoerenhout/camel,Fabryprog/camel,tadayosi/camel,objectiser/camel,pax95/camel,tdiesler/camel,tadayosi/camel,cunningt/camel,christophd/camel,nikhilvibhav/camel,cunningt/camel,tadayosi/camel,apache/camel,adessaigne/camel,gnodet/camel,DariusX/camel,gnodet/camel,apache/camel,alvinkwekel/camel,christophd/camel,pax95/camel,mcollovati/camel,tdiesler/camel,nikhilvibhav/camel,nikhilvibhav/camel,adessaigne/camel,christophd/camel,tdiesler/camel,cunningt/camel,tadayosi/camel,pmoerenhout/camel,alvinkwekel/camel,tdiesler/camel,zregvart/camel,DariusX/camel,pax95/camel | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.properties;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import org.apache.camel.Endpoint;
import org.apache.camel.api.management.ManagedAttribute;
import org.apache.camel.api.management.ManagedOperation;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.annotations.Component;
import org.apache.camel.support.DefaultComponent;
import org.apache.camel.support.LRUCacheFactory;
import org.apache.camel.util.FilePathResolver;
import org.apache.camel.util.ObjectHelper;
/**
* The <a href="http://camel.apache.org/properties">Properties Component</a> allows you to use property placeholders when defining Endpoint URIs
*/
@Component("properties")
@ManagedResource(description = "Managed PropertiesComponent")
public class PropertiesComponent extends DefaultComponent implements org.apache.camel.spi.PropertiesComponent {
/**
* Never check system properties.
*/
public static final int SYSTEM_PROPERTIES_MODE_NEVER = 0;
/**
* Check system properties if not resolvable in the specified properties.
*/
public static final int SYSTEM_PROPERTIES_MODE_FALLBACK = 1;
/**
     * Check system properties first, before trying the specified properties.
* This allows system properties to override any other property source.
* <p/>
* This is the default.
*/
public static final int SYSTEM_PROPERTIES_MODE_OVERRIDE = 2;
/**
* Never check OS environment variables.
*/
public static final int ENVIRONMENT_VARIABLES_MODE_NEVER = 0;
/**
* Check OS environment variables if not resolvable in the specified properties.
* <p/>
* This is the default.
*/
public static final int ENVIRONMENT_VARIABLES_MODE_FALLBACK = 1;
/**
* Check OS environment variables first, before trying the specified properties.
     * This allows OS environment variables to override any other property source.
*/
public static final int ENVIRONMENT_VARIABLES_MODE_OVERRIDE = 2;
/**
     * Key for storing special override properties that containers such as OSGi can store
* in the OSGi service registry
*/
public static final String OVERRIDE_PROPERTIES = PropertiesComponent.class.getName() + ".OverrideProperties";
@SuppressWarnings("unchecked")
private final Map<CacheKey, Properties> cacheMap = LRUCacheFactory.newLRUSoftCache(1000);
private final Map<String, PropertiesFunction> functions = new LinkedHashMap<>();
private PropertiesResolver propertiesResolver = new DefaultPropertiesResolver(this);
private PropertiesParser propertiesParser = new DefaultPropertiesParser(this);
private List<PropertiesLocation> locations = Collections.emptyList();
private transient String propertyPrefixResolved;
@Metadata
private boolean ignoreMissingLocation;
@Metadata
private String encoding;
@Metadata(defaultValue = "true")
private boolean cache = true;
@Metadata(label = "advanced")
private String propertyPrefix;
@Metadata(label = "advanced")
private String propertySuffix;
private transient String propertySuffixResolved;
@Metadata(label = "advanced", defaultValue = "true")
private boolean fallbackToUnaugmentedProperty = true;
@Metadata(defaultValue = "true")
private boolean defaultFallbackEnabled = true;
@Metadata(label = "advanced", defaultValue = DEFAULT_PREFIX_TOKEN)
private String prefixToken = DEFAULT_PREFIX_TOKEN;
@Metadata(label = "advanced", defaultValue = DEFAULT_SUFFIX_TOKEN)
private String suffixToken = DEFAULT_SUFFIX_TOKEN;
@Metadata(label = "advanced")
private Properties initialProperties;
@Metadata(label = "advanced")
private Properties overrideProperties;
@Metadata(defaultValue = "" + SYSTEM_PROPERTIES_MODE_OVERRIDE, enums = "0,1,2")
private int systemPropertiesMode = SYSTEM_PROPERTIES_MODE_OVERRIDE;
@Metadata(defaultValue = "" + SYSTEM_PROPERTIES_MODE_FALLBACK, enums = "0,1,2")
private int environmentVariableMode = ENVIRONMENT_VARIABLES_MODE_FALLBACK;
public PropertiesComponent() {
super();
// include out of the box functions
addFunction(new EnvPropertiesFunction());
addFunction(new SysPropertiesFunction());
addFunction(new ServicePropertiesFunction());
addFunction(new ServiceHostPropertiesFunction());
addFunction(new ServicePortPropertiesFunction());
}
public PropertiesComponent(String location) {
this();
setLocation(location);
}
public PropertiesComponent(String... locations) {
this();
setLocations(locations);
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
List<PropertiesLocation> paths = locations;
Boolean ignoreMissingLocationLoc = getAndRemoveParameter(parameters, "ignoreMissingLocation", Boolean.class);
if (ignoreMissingLocationLoc != null) {
ignoreMissingLocation = ignoreMissingLocationLoc;
}
// override default locations
String locations = getAndRemoveParameter(parameters, "locations", String.class);
if (locations != null) {
log.trace("Overriding default locations with location: {}", locations);
paths = Arrays.stream(locations.split(",")).map(PropertiesLocation::new).collect(Collectors.toList());
}
String endpointUri = parseUri(remaining, paths);
log.debug("Endpoint uri parsed as: {}", endpointUri);
Endpoint delegate = getCamelContext().getEndpoint(endpointUri);
PropertiesEndpoint answer = new PropertiesEndpoint(uri, delegate, this);
setProperties(answer, parameters);
return answer;
}
public String parseUri(String uri) throws Exception {
return parseUri(uri, locations);
}
public String parseUri(String uri, String... locations) throws Exception {
return parseUri(
uri,
locations != null
? Arrays.stream(locations).map(PropertiesLocation::new).collect(Collectors.toList())
: Collections.emptyList());
}
public Properties loadProperties() throws Exception {
return doLoadProperties(locations);
}
public Properties loadProperties(String... locations) throws Exception {
if (locations != null) {
return doLoadProperties(Arrays.stream(locations).map(PropertiesLocation::new).collect(Collectors.toList()));
}
return new Properties();
}
protected Properties doLoadProperties(List<PropertiesLocation> paths) throws Exception {
Properties prop = new Properties();
// use initial properties
if (initialProperties != null) {
prop.putAll(initialProperties);
}
// use locations
if (paths != null) {
// location may contain JVM system property or OS environment variables
// so we need to parse those
List<PropertiesLocation> locations = parseLocations(paths);
// check cache first
CacheKey key = new CacheKey(locations);
Properties locationsProp = cache ? cacheMap.get(key) : null;
if (locationsProp == null) {
locationsProp = propertiesResolver.resolveProperties(getCamelContext(), ignoreMissingLocation, locations);
if (cache) {
cacheMap.put(key, locationsProp);
}
}
prop.putAll(locationsProp);
}
// use override properties
if (overrideProperties != null) {
// make a copy to avoid affecting the original properties
Properties override = new Properties();
override.putAll(prop);
override.putAll(overrideProperties);
prop = override;
}
return prop;
}
protected String parseUri(String uri, List<PropertiesLocation> paths) throws Exception {
Properties prop = doLoadProperties(paths);
// enclose tokens if missing
if (!uri.contains(prefixToken) && !uri.startsWith(prefixToken)) {
uri = prefixToken + uri;
}
if (!uri.contains(suffixToken) && !uri.endsWith(suffixToken)) {
uri = uri + suffixToken;
}
log.trace("Parsing uri {} with properties: {}", uri, prop);
if (propertiesParser instanceof AugmentedPropertyNameAwarePropertiesParser) {
return ((AugmentedPropertyNameAwarePropertiesParser) propertiesParser).parseUri(
uri,
prop,
prefixToken,
suffixToken,
propertyPrefixResolved,
propertySuffixResolved,
fallbackToUnaugmentedProperty,
defaultFallbackEnabled);
} else {
return propertiesParser.parseUri(uri, prop, prefixToken, suffixToken);
}
}
public List<PropertiesLocation> getLocations() {
return locations;
}
/**
* A list of locations to load properties.
* This option will override any default locations and only use the locations from this option.
*/
public void setLocations(List<PropertiesLocation> locations) {
this.locations = Collections.unmodifiableList(locations);
}
/**
* A list of locations to load properties.
* This option will override any default locations and only use the locations from this option.
*/
public void setLocations(String[] locationStrings) {
List<PropertiesLocation> locations = new ArrayList<>();
if (locationStrings != null) {
for (String locationString : locationStrings) {
locations.add(new PropertiesLocation(locationString));
}
}
setLocations(locations);
}
/**
* A list of locations to load properties.
* This option will override any default locations and only use the locations from this option.
*/
public void setLocations(Collection<String> locationStrings) {
List<PropertiesLocation> locations = new ArrayList<>();
if (locationStrings != null) {
for (String locationString : locationStrings) {
locations.add(new PropertiesLocation(locationString));
}
}
setLocations(locations);
}
public void addLocation(String location) {
if (location != null) {
List<PropertiesLocation> newLocations = new ArrayList<>();
for (String loc : location.split(",")) {
newLocations.add(new PropertiesLocation(loc));
}
List<PropertiesLocation> current = getLocations();
if (!current.isEmpty()) {
newLocations.addAll(0, current);
}
setLocations(newLocations);
}
}
/**
* A list of locations to load properties. You can use comma to separate multiple locations.
* This option will override any default locations and only use the locations from this option.
*/
public void setLocation(String location) {
if (location != null) {
setLocations(location.split(","));
}
}
@ManagedAttribute(description = "Encoding to use when loading properties file from the file system or classpath")
public String getEncoding() {
return encoding;
}
/**
* Encoding to use when loading properties file from the file system or classpath.
* <p/>
     * If no encoding has been set, then the properties file is loaded using ISO-8859-1 encoding (latin-1)
* as documented by {@link java.util.Properties#load(java.io.InputStream)}
*/
public void setEncoding(String encoding) {
this.encoding = encoding;
}
public PropertiesResolver getPropertiesResolver() {
return propertiesResolver;
}
/**
* To use a custom PropertiesResolver
*/
public void setPropertiesResolver(PropertiesResolver propertiesResolver) {
this.propertiesResolver = propertiesResolver;
}
public PropertiesParser getPropertiesParser() {
return propertiesParser;
}
/**
* To use a custom PropertiesParser
*/
public void setPropertiesParser(PropertiesParser propertiesParser) {
this.propertiesParser = propertiesParser;
}
@ManagedAttribute(description = "Whether to cache loaded properties")
public boolean isCache() {
return cache;
}
/**
* Whether or not to cache loaded properties. The default value is true.
*/
public void setCache(boolean cache) {
this.cache = cache;
}
public String getPropertyPrefix() {
return propertyPrefix;
}
/**
* Optional prefix prepended to property names before resolution.
*/
public void setPropertyPrefix(String propertyPrefix) {
this.propertyPrefix = propertyPrefix;
this.propertyPrefixResolved = propertyPrefix;
if (ObjectHelper.isNotEmpty(this.propertyPrefix)) {
this.propertyPrefixResolved = FilePathResolver.resolvePath(this.propertyPrefix);
}
}
public String getPropertySuffix() {
return propertySuffix;
}
/**
* Optional suffix appended to property names before resolution.
*/
public void setPropertySuffix(String propertySuffix) {
this.propertySuffix = propertySuffix;
this.propertySuffixResolved = propertySuffix;
if (ObjectHelper.isNotEmpty(this.propertySuffix)) {
this.propertySuffixResolved = FilePathResolver.resolvePath(this.propertySuffix);
}
}
public boolean isFallbackToUnaugmentedProperty() {
return fallbackToUnaugmentedProperty;
}
/**
* If true, first attempt resolution of property name augmented with propertyPrefix and propertySuffix
     * before falling back to the plain property name specified. If false, only the augmented property name is searched.
*/
public void setFallbackToUnaugmentedProperty(boolean fallbackToUnaugmentedProperty) {
this.fallbackToUnaugmentedProperty = fallbackToUnaugmentedProperty;
}
@ManagedAttribute(description = "Whether to support using fallback values if a property cannot be found")
public boolean isDefaultFallbackEnabled() {
return defaultFallbackEnabled;
}
/**
* If false, the component does not attempt to find a default for the key by looking after the colon separator.
*/
public void setDefaultFallbackEnabled(boolean defaultFallbackEnabled) {
this.defaultFallbackEnabled = defaultFallbackEnabled;
}
@ManagedAttribute(description = "Ignore missing location")
public boolean isIgnoreMissingLocation() {
return ignoreMissingLocation;
}
/**
* Whether to silently ignore if a location cannot be located, such as a properties file not found.
*/
public void setIgnoreMissingLocation(boolean ignoreMissingLocation) {
this.ignoreMissingLocation = ignoreMissingLocation;
}
@ManagedAttribute(description = "Prefix token")
public String getPrefixToken() {
return prefixToken;
}
/**
* Sets the value of the prefix token used to identify properties to replace. Setting a value of
     * {@code null} restores the default token ({@link #DEFAULT_PREFIX_TOKEN}).
*/
public void setPrefixToken(String prefixToken) {
if (prefixToken == null) {
this.prefixToken = DEFAULT_PREFIX_TOKEN;
} else {
this.prefixToken = prefixToken;
}
}
@ManagedAttribute(description = "Suffix token")
public String getSuffixToken() {
return suffixToken;
}
/**
* Sets the value of the suffix token used to identify properties to replace. Setting a value of
     * {@code null} restores the default token ({@link #DEFAULT_SUFFIX_TOKEN}).
*/
public void setSuffixToken(String suffixToken) {
if (suffixToken == null) {
this.suffixToken = DEFAULT_SUFFIX_TOKEN;
} else {
this.suffixToken = suffixToken;
}
}
public Properties getInitialProperties() {
return initialProperties;
}
/**
* Sets initial properties which will be used before any locations are resolved.
*
* @param initialProperties properties that are added first
*/
public void setInitialProperties(Properties initialProperties) {
this.initialProperties = initialProperties;
}
public Properties getOverrideProperties() {
return overrideProperties;
}
/**
* Sets a special list of override properties that take precedence
     * and will be used first, if a property exists.
     *
     * @param overrideProperties properties that are used first
*/
public void setOverrideProperties(Properties overrideProperties) {
this.overrideProperties = overrideProperties;
}
/**
* Gets the functions registered in this properties component.
*/
public Map<String, PropertiesFunction> getFunctions() {
return functions;
}
/**
* Registers the {@link org.apache.camel.component.properties.PropertiesFunction} as a function to this component.
*/
public void addFunction(PropertiesFunction function) {
this.functions.put(function.getName(), function);
}
/**
* Is there a {@link org.apache.camel.component.properties.PropertiesFunction} with the given name?
*/
public boolean hasFunction(String name) {
return functions.containsKey(name);
}
@ManagedAttribute(description = "System properties mode")
public int getSystemPropertiesMode() {
return systemPropertiesMode;
}
/**
* Sets the system property (and environment variable) mode.
*
* The default mode (override) is to check system properties (and environment variables) first,
* before trying the specified properties.
* This allows system properties/environment variables to override any other property source.
*
* @see #SYSTEM_PROPERTIES_MODE_NEVER
* @see #SYSTEM_PROPERTIES_MODE_FALLBACK
* @see #SYSTEM_PROPERTIES_MODE_OVERRIDE
*/
public void setSystemPropertiesMode(int systemPropertiesMode) {
this.systemPropertiesMode = systemPropertiesMode;
}
@ManagedAttribute(description = "Environment variable mode")
public int getEnvironmentVariableMode() {
return environmentVariableMode;
}
/**
* Sets the OS environment variables mode.
*
* The default mode (fallback) is to check OS environment variables,
* if the property cannot be resolved from its sources first.
* This allows environment variables as fallback values.
*
* @see #ENVIRONMENT_VARIABLES_MODE_NEVER
* @see #ENVIRONMENT_VARIABLES_MODE_FALLBACK
* @see #ENVIRONMENT_VARIABLES_MODE_OVERRIDE
*/
public void setEnvironmentVariableMode(int environmentVariableMode) {
this.environmentVariableMode = environmentVariableMode;
}
@Override
public boolean isResolvePropertyPlaceholders() {
        // it's a chicken-and-egg problem: we cannot resolve placeholders on ourselves
return false;
}
/**
* Clears the cache
*/
@ManagedOperation(description = "Clears the cache")
public void clearCache() {
this.cacheMap.clear();
}
@Override
protected void doStart() throws Exception {
super.doStart();
if (systemPropertiesMode != SYSTEM_PROPERTIES_MODE_NEVER
&& systemPropertiesMode != SYSTEM_PROPERTIES_MODE_FALLBACK
&& systemPropertiesMode != SYSTEM_PROPERTIES_MODE_OVERRIDE) {
throw new IllegalArgumentException("Option systemPropertiesMode has invalid value: " + systemPropertiesMode);
}
if (environmentVariableMode != ENVIRONMENT_VARIABLES_MODE_NEVER
&& environmentVariableMode != ENVIRONMENT_VARIABLES_MODE_FALLBACK
&& environmentVariableMode != ENVIRONMENT_VARIABLES_MODE_OVERRIDE) {
throw new IllegalArgumentException("Option environmentVariableMode has invalid value: " + environmentVariableMode);
}
// inject the component to the parser
if (propertiesParser instanceof DefaultPropertiesParser) {
((DefaultPropertiesParser) propertiesParser).setPropertiesComponent(this);
}
}
@Override
protected void doStop() throws Exception {
cacheMap.clear();
super.doStop();
}
private List<PropertiesLocation> parseLocations(List<PropertiesLocation> locations) {
List<PropertiesLocation> answer = new ArrayList<>();
for (PropertiesLocation location : locations) {
log.trace("Parsing location: {}", location);
try {
String path = FilePathResolver.resolvePath(location.getPath());
log.debug("Parsed location: {}", path);
if (ObjectHelper.isNotEmpty(path)) {
answer.add(new PropertiesLocation(
location.getResolver(),
path,
location.isOptional())
);
}
} catch (IllegalArgumentException e) {
if (!ignoreMissingLocation && !location.isOptional()) {
throw e;
} else {
log.debug("Ignored missing location: {}", location);
}
}
}
// must return a not-null answer
return answer;
}
/**
* Key used in the locations cache
*/
private static final class CacheKey implements Serializable {
private static final long serialVersionUID = 1L;
private final List<PropertiesLocation> locations;
private CacheKey(List<PropertiesLocation> locations) {
this.locations = new ArrayList<>(locations);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CacheKey that = (CacheKey) o;
return locations.equals(that.locations);
}
@Override
public int hashCode() {
return locations.hashCode();
}
@Override
public String toString() {
return "LocationKey[" + locations.toString() + "]";
}
}
}
| components/camel-properties/src/main/java/org/apache/camel/component/properties/PropertiesComponent.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.properties;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import org.apache.camel.Endpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.annotations.Component;
import org.apache.camel.support.DefaultComponent;
import org.apache.camel.support.LRUCacheFactory;
import org.apache.camel.util.FilePathResolver;
import org.apache.camel.util.ObjectHelper;
/**
* The <a href="http://camel.apache.org/properties">Properties Component</a> allows you to use property placeholders when defining Endpoint URIs
*/
@Component("properties")
public class PropertiesComponent extends DefaultComponent implements org.apache.camel.spi.PropertiesComponent {
/**
* Never check system properties.
*/
public static final int SYSTEM_PROPERTIES_MODE_NEVER = 0;
/**
* Check system properties if not resolvable in the specified properties.
*/
public static final int SYSTEM_PROPERTIES_MODE_FALLBACK = 1;
/**
     * Check system properties first, before trying the specified properties.
* This allows system properties to override any other property source.
* <p/>
* This is the default.
*/
public static final int SYSTEM_PROPERTIES_MODE_OVERRIDE = 2;
/**
* Never check OS environment variables.
*/
public static final int ENVIRONMENT_VARIABLES_MODE_NEVER = 0;
/**
* Check OS environment variables if not resolvable in the specified properties.
* <p/>
* This is the default.
*/
public static final int ENVIRONMENT_VARIABLES_MODE_FALLBACK = 1;
/**
* Check OS environment variables first, before trying the specified properties.
     * This allows OS environment variables to override any other property source.
*/
public static final int ENVIRONMENT_VARIABLES_MODE_OVERRIDE = 2;
/**
     * Key for storing special override properties that containers such as OSGi can store
* in the OSGi service registry
*/
public static final String OVERRIDE_PROPERTIES = PropertiesComponent.class.getName() + ".OverrideProperties";
@SuppressWarnings("unchecked")
private final Map<CacheKey, Properties> cacheMap = LRUCacheFactory.newLRUSoftCache(1000);
private final Map<String, PropertiesFunction> functions = new LinkedHashMap<>();
private PropertiesResolver propertiesResolver = new DefaultPropertiesResolver(this);
private PropertiesParser propertiesParser = new DefaultPropertiesParser(this);
private List<PropertiesLocation> locations = Collections.emptyList();
private transient String propertyPrefixResolved;
@Metadata
private boolean ignoreMissingLocation;
@Metadata
private String encoding;
@Metadata(defaultValue = "true")
private boolean cache = true;
@Metadata(label = "advanced")
private String propertyPrefix;
@Metadata(label = "advanced")
private String propertySuffix;
private transient String propertySuffixResolved;
@Metadata(label = "advanced", defaultValue = "true")
private boolean fallbackToUnaugmentedProperty = true;
@Metadata(defaultValue = "true")
private boolean defaultFallbackEnabled = true;
@Metadata(label = "advanced", defaultValue = DEFAULT_PREFIX_TOKEN)
private String prefixToken = DEFAULT_PREFIX_TOKEN;
@Metadata(label = "advanced", defaultValue = DEFAULT_SUFFIX_TOKEN)
private String suffixToken = DEFAULT_SUFFIX_TOKEN;
@Metadata(label = "advanced")
private Properties initialProperties;
@Metadata(label = "advanced")
private Properties overrideProperties;
@Metadata(defaultValue = "" + SYSTEM_PROPERTIES_MODE_OVERRIDE, enums = "0,1,2")
private int systemPropertiesMode = SYSTEM_PROPERTIES_MODE_OVERRIDE;
@Metadata(defaultValue = "" + SYSTEM_PROPERTIES_MODE_FALLBACK, enums = "0,1,2")
private int environmentVariableMode = ENVIRONMENT_VARIABLES_MODE_FALLBACK;
public PropertiesComponent() {
super();
// include out of the box functions
addFunction(new EnvPropertiesFunction());
addFunction(new SysPropertiesFunction());
addFunction(new ServicePropertiesFunction());
addFunction(new ServiceHostPropertiesFunction());
addFunction(new ServicePortPropertiesFunction());
}
public PropertiesComponent(String location) {
this();
setLocation(location);
}
public PropertiesComponent(String... locations) {
this();
setLocations(locations);
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
List<PropertiesLocation> paths = locations;
Boolean ignoreMissingLocationLoc = getAndRemoveParameter(parameters, "ignoreMissingLocation", Boolean.class);
if (ignoreMissingLocationLoc != null) {
ignoreMissingLocation = ignoreMissingLocationLoc;
}
// override default locations
String locations = getAndRemoveParameter(parameters, "locations", String.class);
if (locations != null) {
log.trace("Overriding default locations with location: {}", locations);
paths = Arrays.stream(locations.split(",")).map(PropertiesLocation::new).collect(Collectors.toList());
}
String endpointUri = parseUri(remaining, paths);
log.debug("Endpoint uri parsed as: {}", endpointUri);
Endpoint delegate = getCamelContext().getEndpoint(endpointUri);
PropertiesEndpoint answer = new PropertiesEndpoint(uri, delegate, this);
setProperties(answer, parameters);
return answer;
}
public String parseUri(String uri) throws Exception {
return parseUri(uri, locations);
}
public String parseUri(String uri, String... locations) throws Exception {
return parseUri(
uri,
locations != null
? Arrays.stream(locations).map(PropertiesLocation::new).collect(Collectors.toList())
: Collections.emptyList());
}
public Properties loadProperties() throws Exception {
return doLoadProperties(locations);
}
public Properties loadProperties(String... locations) throws Exception {
if (locations != null) {
return doLoadProperties(Arrays.stream(locations).map(PropertiesLocation::new).collect(Collectors.toList()));
}
return new Properties();
}
protected Properties doLoadProperties(List<PropertiesLocation> paths) throws Exception {
Properties prop = new Properties();
// use initial properties
if (initialProperties != null) {
prop.putAll(initialProperties);
}
// use locations
if (paths != null) {
// location may contain JVM system property or OS environment variables
// so we need to parse those
List<PropertiesLocation> locations = parseLocations(paths);
// check cache first
CacheKey key = new CacheKey(locations);
Properties locationsProp = cache ? cacheMap.get(key) : null;
if (locationsProp == null) {
locationsProp = propertiesResolver.resolveProperties(getCamelContext(), ignoreMissingLocation, locations);
if (cache) {
cacheMap.put(key, locationsProp);
}
}
prop.putAll(locationsProp);
}
// use override properties
if (overrideProperties != null) {
// make a copy to avoid affecting the original properties
Properties override = new Properties();
override.putAll(prop);
override.putAll(overrideProperties);
prop = override;
}
return prop;
}
protected String parseUri(String uri, List<PropertiesLocation> paths) throws Exception {
Properties prop = doLoadProperties(paths);
// enclose tokens if missing
if (!uri.contains(prefixToken) && !uri.startsWith(prefixToken)) {
uri = prefixToken + uri;
}
if (!uri.contains(suffixToken) && !uri.endsWith(suffixToken)) {
uri = uri + suffixToken;
}
log.trace("Parsing uri {} with properties: {}", uri, prop);
if (propertiesParser instanceof AugmentedPropertyNameAwarePropertiesParser) {
return ((AugmentedPropertyNameAwarePropertiesParser) propertiesParser).parseUri(
uri,
prop,
prefixToken,
suffixToken,
propertyPrefixResolved,
propertySuffixResolved,
fallbackToUnaugmentedProperty,
defaultFallbackEnabled);
} else {
return propertiesParser.parseUri(uri, prop, prefixToken, suffixToken);
}
}
public List<PropertiesLocation> getLocations() {
return locations;
}
/**
* A list of locations to load properties.
* This option will override any default locations and only use the locations from this option.
*/
public void setLocations(List<PropertiesLocation> locations) {
this.locations = Collections.unmodifiableList(locations);
}
/**
* A list of locations to load properties.
* This option will override any default locations and only use the locations from this option.
*/
public void setLocations(String[] locationStrings) {
List<PropertiesLocation> locations = new ArrayList<>();
if (locationStrings != null) {
for (String locationString : locationStrings) {
locations.add(new PropertiesLocation(locationString));
}
}
setLocations(locations);
}
/**
* A list of locations to load properties.
* This option will override any default locations and only use the locations from this option.
*/
public void setLocations(Collection<String> locationStrings) {
List<PropertiesLocation> locations = new ArrayList<>();
if (locationStrings != null) {
for (String locationString : locationStrings) {
locations.add(new PropertiesLocation(locationString));
}
}
setLocations(locations);
}
public void addLocation(String location) {
if (location != null) {
List<PropertiesLocation> newLocations = new ArrayList<>();
for (String loc : location.split(",")) {
newLocations.add(new PropertiesLocation(loc));
}
List<PropertiesLocation> current = getLocations();
if (!current.isEmpty()) {
newLocations.addAll(0, current);
}
setLocations(newLocations);
}
}
/**
* A list of locations to load properties. You can use comma to separate multiple locations.
* This option will override any default locations and only use the locations from this option.
*/
public void setLocation(String location) {
if (location != null) {
setLocations(location.split(","));
}
}
public String getEncoding() {
return encoding;
}
/**
* Encoding to use when loading properties file from the file system or classpath.
* <p/>
     * If no encoding has been set, then the properties file is loaded using ISO-8859-1 encoding (latin-1)
* as documented by {@link java.util.Properties#load(java.io.InputStream)}
*/
public void setEncoding(String encoding) {
this.encoding = encoding;
}
public PropertiesResolver getPropertiesResolver() {
return propertiesResolver;
}
/**
* To use a custom PropertiesResolver
*/
public void setPropertiesResolver(PropertiesResolver propertiesResolver) {
this.propertiesResolver = propertiesResolver;
}
public PropertiesParser getPropertiesParser() {
return propertiesParser;
}
/**
* To use a custom PropertiesParser
*/
public void setPropertiesParser(PropertiesParser propertiesParser) {
this.propertiesParser = propertiesParser;
}
public boolean isCache() {
return cache;
}
/**
* Whether or not to cache loaded properties. The default value is true.
*/
public void setCache(boolean cache) {
this.cache = cache;
}
public String getPropertyPrefix() {
return propertyPrefix;
}
/**
* Optional prefix prepended to property names before resolution.
*/
public void setPropertyPrefix(String propertyPrefix) {
this.propertyPrefix = propertyPrefix;
this.propertyPrefixResolved = propertyPrefix;
if (ObjectHelper.isNotEmpty(this.propertyPrefix)) {
this.propertyPrefixResolved = FilePathResolver.resolvePath(this.propertyPrefix);
}
}
public String getPropertySuffix() {
return propertySuffix;
}
/**
* Optional suffix appended to property names before resolution.
*/
public void setPropertySuffix(String propertySuffix) {
this.propertySuffix = propertySuffix;
this.propertySuffixResolved = propertySuffix;
if (ObjectHelper.isNotEmpty(this.propertySuffix)) {
this.propertySuffixResolved = FilePathResolver.resolvePath(this.propertySuffix);
}
}
public boolean isFallbackToUnaugmentedProperty() {
return fallbackToUnaugmentedProperty;
}
/**
* If true, first attempt resolution of property name augmented with propertyPrefix and propertySuffix
     * before falling back to the plain property name specified. If false, only the augmented property name is searched.
*/
public void setFallbackToUnaugmentedProperty(boolean fallbackToUnaugmentedProperty) {
this.fallbackToUnaugmentedProperty = fallbackToUnaugmentedProperty;
}
public boolean isDefaultFallbackEnabled() {
return defaultFallbackEnabled;
}
/**
* If false, the component does not attempt to find a default for the key by looking after the colon separator.
*/
public void setDefaultFallbackEnabled(boolean defaultFallbackEnabled) {
this.defaultFallbackEnabled = defaultFallbackEnabled;
}
public boolean isIgnoreMissingLocation() {
return ignoreMissingLocation;
}
/**
* Whether to silently ignore if a location cannot be located, such as a properties file not found.
*/
public void setIgnoreMissingLocation(boolean ignoreMissingLocation) {
this.ignoreMissingLocation = ignoreMissingLocation;
}
public String getPrefixToken() {
return prefixToken;
}
/**
* Sets the value of the prefix token used to identify properties to replace. Setting a value of
     * {@code null} restores the default token ({@link #DEFAULT_PREFIX_TOKEN}).
*/
public void setPrefixToken(String prefixToken) {
if (prefixToken == null) {
this.prefixToken = DEFAULT_PREFIX_TOKEN;
} else {
this.prefixToken = prefixToken;
}
}
public String getSuffixToken() {
return suffixToken;
}
/**
* Sets the value of the suffix token used to identify properties to replace. Setting a value of
     * {@code null} restores the default token ({@link #DEFAULT_SUFFIX_TOKEN}).
*/
public void setSuffixToken(String suffixToken) {
if (suffixToken == null) {
this.suffixToken = DEFAULT_SUFFIX_TOKEN;
} else {
this.suffixToken = suffixToken;
}
}
public Properties getInitialProperties() {
return initialProperties;
}
/**
* Sets initial properties which will be used before any locations are resolved.
*
* @param initialProperties properties that are added first
*/
public void setInitialProperties(Properties initialProperties) {
this.initialProperties = initialProperties;
}
public Properties getOverrideProperties() {
return overrideProperties;
}
/**
* Sets a special list of override properties that take precedence
     * and will be used first, if a property exists.
     *
     * @param overrideProperties properties that are used first
*/
public void setOverrideProperties(Properties overrideProperties) {
this.overrideProperties = overrideProperties;
}
/**
* Gets the functions registered in this properties component.
*/
public Map<String, PropertiesFunction> getFunctions() {
return functions;
}
/**
* Registers the {@link org.apache.camel.component.properties.PropertiesFunction} as a function to this component.
*/
public void addFunction(PropertiesFunction function) {
this.functions.put(function.getName(), function);
}
/**
* Is there a {@link org.apache.camel.component.properties.PropertiesFunction} with the given name?
*/
public boolean hasFunction(String name) {
return functions.containsKey(name);
}
public int getSystemPropertiesMode() {
return systemPropertiesMode;
}
/**
* Sets the system property (and environment variable) mode.
*
* The default mode (override) is to check system properties (and environment variables) first,
* before trying the specified properties.
* This allows system properties/environment variables to override any other property source.
*
* @see #SYSTEM_PROPERTIES_MODE_NEVER
* @see #SYSTEM_PROPERTIES_MODE_FALLBACK
* @see #SYSTEM_PROPERTIES_MODE_OVERRIDE
*/
public void setSystemPropertiesMode(int systemPropertiesMode) {
this.systemPropertiesMode = systemPropertiesMode;
}
public int getEnvironmentVariableMode() {
return environmentVariableMode;
}
/**
* Sets the OS environment variables mode.
*
* The default mode (fallback) is to check OS environment variables,
* if the property cannot be resolved from its sources first.
* This allows environment variables as fallback values.
*
* @see #ENVIRONMENT_VARIABLES_MODE_NEVER
* @see #ENVIRONMENT_VARIABLES_MODE_FALLBACK
* @see #ENVIRONMENT_VARIABLES_MODE_OVERRIDE
*/
public void setEnvironmentVariableMode(int environmentVariableMode) {
this.environmentVariableMode = environmentVariableMode;
}
@Override
public boolean isResolvePropertyPlaceholders() {
        // it's a chicken-and-egg problem: we cannot resolve placeholders on ourselves
return false;
}
@Override
protected void doStart() throws Exception {
super.doStart();
if (systemPropertiesMode != SYSTEM_PROPERTIES_MODE_NEVER
&& systemPropertiesMode != SYSTEM_PROPERTIES_MODE_FALLBACK
&& systemPropertiesMode != SYSTEM_PROPERTIES_MODE_OVERRIDE) {
throw new IllegalArgumentException("Option systemPropertiesMode has invalid value: " + systemPropertiesMode);
}
if (environmentVariableMode != ENVIRONMENT_VARIABLES_MODE_NEVER
&& environmentVariableMode != ENVIRONMENT_VARIABLES_MODE_FALLBACK
&& environmentVariableMode != ENVIRONMENT_VARIABLES_MODE_OVERRIDE) {
throw new IllegalArgumentException("Option environmentVariableMode has invalid value: " + environmentVariableMode);
}
// inject the component to the parser
if (propertiesParser instanceof DefaultPropertiesParser) {
((DefaultPropertiesParser) propertiesParser).setPropertiesComponent(this);
}
}
@Override
protected void doStop() throws Exception {
cacheMap.clear();
super.doStop();
}
private List<PropertiesLocation> parseLocations(List<PropertiesLocation> locations) {
List<PropertiesLocation> answer = new ArrayList<>();
for (PropertiesLocation location : locations) {
log.trace("Parsing location: {}", location);
try {
String path = FilePathResolver.resolvePath(location.getPath());
log.debug("Parsed location: {}", path);
if (ObjectHelper.isNotEmpty(path)) {
answer.add(new PropertiesLocation(
location.getResolver(),
path,
location.isOptional())
);
}
} catch (IllegalArgumentException e) {
if (!ignoreMissingLocation && !location.isOptional()) {
throw e;
} else {
log.debug("Ignored missing location: {}", location);
}
}
}
// must return a not-null answer
return answer;
}
/**
* Key used in the locations cache
*/
private static final class CacheKey implements Serializable {
private static final long serialVersionUID = 1L;
private final List<PropertiesLocation> locations;
private CacheKey(List<PropertiesLocation> locations) {
this.locations = new ArrayList<>(locations);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CacheKey that = (CacheKey) o;
return locations.equals(that.locations);
}
@Override
public int hashCode() {
return locations.hashCode();
}
@Override
public String toString() {
return "LocationKey[" + locations.toString() + "]";
}
}
}
| CAMEL-13425: Properties component can now clear its cache, also added JMX attributes/operations.
| components/camel-properties/src/main/java/org/apache/camel/component/properties/PropertiesComponent.java | CAMEL-13425: Properties component can now clear its cache, also added JMX attributes/operations. |
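A minimal sketch of exercising the cache behaviour that this CAMEL-13425 record exposes over JMX; the CamelContext bootstrap and the classpath:app.properties location are illustrative assumptions, not taken from the record:

import org.apache.camel.CamelContext;
import org.apache.camel.component.properties.PropertiesComponent;
import org.apache.camel.impl.DefaultCamelContext;

public class PropertiesCacheSketch {
    public static void main(String[] args) throws Exception {
        CamelContext context = new DefaultCamelContext();
        // Construct the component directly; the properties location is a hypothetical example
        PropertiesComponent pc = new PropertiesComponent("classpath:app.properties");
        pc.setCamelContext(context);
        // Loading populates the internal LRU soft cache (cache=true by default)
        pc.loadProperties();
        // New in this commit: clearCache() is also exposed as a JMX ManagedOperation
        pc.clearCache();
    }
}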
|
Java | apache-2.0 | 0350fe8671a607c9d6f49a07ecde111f45580c98 | 0 | metaborg/jsglr,metaborg/jsglr,metaborg/jsglr,metaborg/jsglr | package org.spoofax.jsglr2;
import java.util.Objects;
import org.metaborg.parsetable.IParseTable;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.jsglr2.imploder.*;
import org.spoofax.jsglr2.imploder.incremental.IncrementalStrategoTermImploder;
import org.spoofax.jsglr2.parseforest.IParseForest;
import org.spoofax.jsglr2.parseforest.ParseForestConstruction;
import org.spoofax.jsglr2.parseforest.ParseForestRepresentation;
import org.spoofax.jsglr2.parser.IParser;
import org.spoofax.jsglr2.parser.ParserVariant;
import org.spoofax.jsglr2.reducing.Reducing;
import org.spoofax.jsglr2.stack.StackRepresentation;
import org.spoofax.jsglr2.stack.collections.ActiveStacksRepresentation;
import org.spoofax.jsglr2.stack.collections.ForActorStacksRepresentation;
import org.spoofax.jsglr2.tokens.NullTokenizer;
import org.spoofax.jsglr2.tokens.TokenizerVariant;
public class JSGLR2Variant {
public final ParserVariant parser;
public final ImploderVariant imploder;
public final TokenizerVariant tokenizer;
public JSGLR2Variant(ParserVariant parserVariant, ImploderVariant imploderVariant,
TokenizerVariant tokenizerVariant) {
this.parser = parserVariant;
this.imploder = imploderVariant;
this.tokenizer = tokenizerVariant;
}
private <ParseForest extends IParseForest> IImploder<ParseForest, TreeImploder.SubTree<IStrategoTerm>>
getImploder() {
switch(this.imploder) {
default:
case Recursive:
return new StrategoTermImploder<>();
case RecursiveIncremental:
return new IncrementalStrategoTermImploder<>();
case Iterative:
return new IterativeStrategoTermImploder<>();
}
}
private ITokenizer<TreeImploder.SubTree<IStrategoTerm>, IStrategoTerm> getTokenizer() {
switch(this.tokenizer) {
default:
case Null:
return (input, filename, implodeResult) -> new TokenizeResult<>(null, implodeResult.tree);
case Recursive:
return new StrategoTermTokenizer();
case Iterative:
return new IterativeStrategoTermTokenizer();
}
}
public JSGLR2<IStrategoTerm> getJSGLR2(IParseTable parseTable) {
if(!this.isValid())
throw new IllegalStateException("Invalid JSGLR2 variant");
@SuppressWarnings("unchecked") final IParser<IParseForest> parser =
(IParser<IParseForest>) this.parser.getParser(parseTable);
if(this.parser.parseForestRepresentation == ParseForestRepresentation.Null)
return new JSGLR2Implementation<>(parser, new NullStrategoImploder<>(), new NullTokenizer<>());
if(this.imploder == ImploderVariant.TokenizedRecursive)
return new JSGLR2Implementation<>(parser, new TokenizedStrategoTermImploder<>(), new NullTokenizer<>());
return new JSGLR2Implementation<>(parser, getImploder(), getTokenizer());
}
public String name() {
return parser.name() + "//Imploder:" + imploder.name() + "//Tokenizer:" + tokenizer.name();
}
@Override public boolean equals(Object o) {
if(this == o)
return true;
if(o == null || getClass() != o.getClass())
return false;
JSGLR2Variant variant = (JSGLR2Variant) o;
return Objects.equals(parser, variant.parser) && imploder == variant.imploder && tokenizer == variant.tokenizer;
}
public boolean isValid() {
return parser.isValid()
&& (imploder == ImploderVariant.TokenizedRecursive) == (tokenizer == TokenizerVariant.Null);
}
public enum Preset {
// @formatter:off
standard(
new JSGLR2Variant(
new ParserVariant(
ActiveStacksRepresentation.standard(),
ForActorStacksRepresentation.standard(),
ParseForestRepresentation.standard(),
ParseForestConstruction.standard(),
StackRepresentation.standard(),
Reducing.standard()),
ImploderVariant.standard(),
TokenizerVariant.standard())),
dataDependent(
new JSGLR2Variant(
new ParserVariant(
ActiveStacksRepresentation.standard(),
ForActorStacksRepresentation.standard(),
ParseForestRepresentation.DataDependent,
ParseForestConstruction.standard(),
StackRepresentation.Hybrid,
Reducing.DataDependent),
ImploderVariant.standard(),
TokenizerVariant.standard())),
layoutSensitive(
new JSGLR2Variant(
new ParserVariant(
ActiveStacksRepresentation.standard(),
ForActorStacksRepresentation.standard(),
ParseForestRepresentation.LayoutSensitive,
ParseForestConstruction.standard(),
StackRepresentation.Hybrid,
Reducing.LayoutSensitive),
ImploderVariant.standard(),
TokenizerVariant.standard())),
incremental(
new JSGLR2Variant(
new ParserVariant(
ActiveStacksRepresentation.standard(),
ForActorStacksRepresentation.standard(),
ParseForestRepresentation.Incremental,
ParseForestConstruction.Full,
StackRepresentation.Hybrid,
Reducing.Incremental),
ImploderVariant.RecursiveIncremental,
TokenizerVariant.Recursive));
// @formatter:on
public JSGLR2Variant variant;
Preset(JSGLR2Variant variant) {
this.variant = variant;
}
public JSGLR2<IStrategoTerm> getJSGLR2(IParseTable parseTable) {
return variant.getJSGLR2(parseTable);
}
}
}
| org.spoofax.jsglr2/src/main/java/org/spoofax/jsglr2/JSGLR2Variant.java | package org.spoofax.jsglr2;
import java.util.Objects;
import org.metaborg.parsetable.IParseTable;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.jsglr2.imploder.*;
import org.spoofax.jsglr2.imploder.incremental.IncrementalStrategoTermImploder;
import org.spoofax.jsglr2.parseforest.IParseForest;
import org.spoofax.jsglr2.parseforest.ParseForestConstruction;
import org.spoofax.jsglr2.parseforest.ParseForestRepresentation;
import org.spoofax.jsglr2.parser.IParser;
import org.spoofax.jsglr2.parser.ParserVariant;
import org.spoofax.jsglr2.reducing.Reducing;
import org.spoofax.jsglr2.stack.StackRepresentation;
import org.spoofax.jsglr2.stack.collections.ActiveStacksRepresentation;
import org.spoofax.jsglr2.stack.collections.ForActorStacksRepresentation;
import org.spoofax.jsglr2.tokens.NullTokenizer;
import org.spoofax.jsglr2.tokens.TokenizerVariant;
public class JSGLR2Variant {
public final ParserVariant parser;
public final ImploderVariant imploder;
public final TokenizerVariant tokenizer;
public JSGLR2Variant(ParserVariant parserVariant, ImploderVariant imploderVariant,
TokenizerVariant tokenizerVariant) {
this.parser = parserVariant;
this.imploder = imploderVariant;
this.tokenizer = tokenizerVariant;
}
private <ParseForest extends IParseForest> IImploder<ParseForest, TreeImploder.SubTree<IStrategoTerm>>
getImploder() {
switch(this.imploder) {
default:
case Recursive:
return new StrategoTermImploder<>();
case RecursiveIncremental:
return new IncrementalStrategoTermImploder<>();
case Iterative:
return new IterativeStrategoTermImploder<>();
}
}
private ITokenizer<TreeImploder.SubTree<IStrategoTerm>, IStrategoTerm> getTokenizer() {
switch(this.tokenizer) {
default:
case Null:
return (input, filename, implodeResult) -> new TokenizeResult<>(null, implodeResult.tree);
case Recursive:
return new StrategoTermTokenizer();
case Iterative:
return new IterativeStrategoTermTokenizer();
}
}
public JSGLR2<IStrategoTerm> getJSGLR2(IParseTable parseTable) {
if(!this.isValid())
throw new IllegalStateException("Invalid JSGLR2 variant");
@SuppressWarnings("unchecked") final IParser<IParseForest> parser =
(IParser<IParseForest>) this.parser.getParser(parseTable);
if(this.parser.parseForestRepresentation == ParseForestRepresentation.Null)
return new JSGLR2Implementation<>(parser, new NullStrategoImploder<>(), new NullTokenizer<>());
if(this.imploder == ImploderVariant.TokenizedRecursive)
return new JSGLR2Implementation<>(parser, new TokenizedStrategoTermImploder<>(), new NullTokenizer<>());
return new JSGLR2Implementation<>(parser, getImploder(), getTokenizer());
}
public String name() {
return parser.name() + "//Imploder:" + imploder.name() + "//Tokenizer:" + tokenizer.name();
}
@Override public boolean equals(Object o) {
if(this == o)
return true;
if(o == null || getClass() != o.getClass())
return false;
JSGLR2Variant variant = (JSGLR2Variant) o;
return Objects.equals(parser, variant.parser) && imploder == variant.imploder && tokenizer == variant.tokenizer;
}
public boolean isValid() {
return parser.isValid()
&& (imploder == ImploderVariant.TokenizedRecursive) == (tokenizer == TokenizerVariant.Null);
}
public enum Preset {
// @formatter:off
standard(
new JSGLR2Variant(
new ParserVariant(
ActiveStacksRepresentation.standard(),
ForActorStacksRepresentation.standard(),
ParseForestRepresentation.standard(),
ParseForestConstruction.standard(),
StackRepresentation.standard(),
Reducing.standard()),
ImploderVariant.standard(),
TokenizerVariant.standard())),
dataDependent(
new JSGLR2Variant(
new ParserVariant(
ActiveStacksRepresentation.standard(),
ForActorStacksRepresentation.standard(),
ParseForestRepresentation.DataDependent,
ParseForestConstruction.standard(),
StackRepresentation.Basic,
Reducing.DataDependent),
ImploderVariant.standard(),
TokenizerVariant.standard())),
layoutSensitive(
new JSGLR2Variant(
new ParserVariant(
ActiveStacksRepresentation.standard(),
ForActorStacksRepresentation.standard(),
ParseForestRepresentation.LayoutSensitive,
ParseForestConstruction.standard(),
StackRepresentation.Basic,
Reducing.LayoutSensitive),
ImploderVariant.standard(),
TokenizerVariant.standard())),
incremental(
new JSGLR2Variant(
new ParserVariant(
ActiveStacksRepresentation.standard(),
ForActorStacksRepresentation.standard(),
ParseForestRepresentation.Incremental,
ParseForestConstruction.Full,
StackRepresentation.Basic,
Reducing.Incremental),
ImploderVariant.RecursiveIncremental,
TokenizerVariant.Recursive));
// @formatter:on
public JSGLR2Variant variant;
Preset(JSGLR2Variant variant) {
this.variant = variant;
}
public JSGLR2<IStrategoTerm> getJSGLR2(IParseTable parseTable) {
return variant.getJSGLR2(parseTable);
}
}
}
| Use Hybrid stack representation instead of Basic for non-standard variants
| org.spoofax.jsglr2/src/main/java/org/spoofax/jsglr2/JSGLR2Variant.java | Use Hybrid stack representation instead of Basic for non-standard variants |
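The diff above only swaps StackRepresentation.Basic for StackRepresentation.Hybrid in the dataDependent, layoutSensitive and incremental presets. A small sketch of how such a preset is consumed, assuming the IParseTable has been loaded elsewhere:

import org.metaborg.parsetable.IParseTable;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.jsglr2.JSGLR2;
import org.spoofax.jsglr2.JSGLR2Variant;

public class PresetSketch {
    // Builds a parser from the incremental preset, which after this commit
    // is backed by the Hybrid stack representation instead of Basic
    public static JSGLR2<IStrategoTerm> incrementalParser(IParseTable parseTable) {
        return JSGLR2Variant.Preset.incremental.getJSGLR2(parseTable);
    }
}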
|
Java | apache-2.0 | 0dddd796226faf118fbc605a7dd42fc701ee7313 | 0 | liuyuanyuan/dbeaver,AndrewKhitrin/dbeaver,ruspl-afed/dbeaver,liuyuanyuan/dbeaver,liuyuanyuan/dbeaver,AndrewKhitrin/dbeaver,AndrewKhitrin/dbeaver,AndrewKhitrin/dbeaver,ruspl-afed/dbeaver,liuyuanyuan/dbeaver,liuyuanyuan/dbeaver,ruspl-afed/dbeaver,ruspl-afed/dbeaver | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2015 Serge Rieder ([email protected])
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License (version 2)
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.jkiss.dbeaver.ui.actions.navigator;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.QualifiedName;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.ui.handlers.HandlerUtil;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.core.DBeaverUI;
import org.jkiss.dbeaver.model.navigator.DBNDatabaseNode;
import org.jkiss.dbeaver.model.navigator.DBNNode;
import org.jkiss.dbeaver.model.navigator.DBNResource;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.dnd.TreeNodeTransfer;
import org.jkiss.dbeaver.ui.navigator.NavigatorUtils;
import org.jkiss.dbeaver.utils.RuntimeUtils;
import org.jkiss.utils.CommonUtils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.Map;
public class NavigatorHandlerObjectCreateCopy extends NavigatorHandlerObjectCreateBase {
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
final ISelection selection = HandlerUtil.getCurrentSelection(event);
DBNNode curNode = NavigatorUtils.getSelectedNode(selection);
if (curNode != null) {
Collection<DBNNode> cbNodes = TreeNodeTransfer.getFromClipboard();
if (cbNodes == null) {
UIUtils.showErrorDialog(HandlerUtil.getActiveShell(event), "Paste error", "Clipboard contains data in unsupported format");
return null;
}
for (DBNNode nodeObject : cbNodes) {
if (nodeObject instanceof DBNDatabaseNode) {
createNewObject(HandlerUtil.getActiveWorkbenchWindow(event), curNode, ((DBNDatabaseNode)nodeObject));
} else if (nodeObject instanceof DBNResource && curNode instanceof DBNResource) {
pasteResource((DBNResource)nodeObject, (DBNResource)curNode);
}
}
}
return null;
}
private void pasteResource(DBNResource resourceNode, DBNResource toFolder) {
final IResource resource = resourceNode.getResource();
final IResource targetResource = toFolder.getResource();
assert resource != null;
assert targetResource != null;
final IContainer targetFolder = targetResource instanceof IContainer ? (IContainer) targetResource : targetResource.getParent();
try {
DBeaverUI.runInProgressService(new DBRRunnableWithProgress() {
@Override
public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
try {
copyResource(monitor, resource, targetFolder);
} catch (Exception e) {
throw new InvocationTargetException(e);
}
}
});
} catch (InvocationTargetException e) {
UIUtils.showErrorDialog(null, "Copy error", "Error copying resource", e.getTargetException());
} catch (InterruptedException e) {
// ignore
}
}
private void copyResource(@NotNull DBRProgressMonitor monitor, @NotNull IResource resource, @NotNull IContainer targetFolder) throws CoreException, IOException {
final IProgressMonitor nestedMonitor = RuntimeUtils.getNestedMonitor(monitor);
final String extension = resource.getFileExtension();
String targetName = resource.getName();
if (resource.getParent().equals(targetFolder)) {
String plainName = extension != null && !extension.isEmpty() && targetName.endsWith(extension) ?
targetName.substring(0, targetName.length() - extension.length() - 1) : targetName;
for (int i = 1; ; i++) {
String testName = plainName + "-" + i;
if (!CommonUtils.isEmpty(extension)) {
testName += "." + extension;
}
if (targetFolder.findMember(testName) == null) {
targetName = testName;
break;
}
}
} else if (targetFolder.findMember(targetName) != null) {
throw new IOException("Target resource '" + targetName + "' already exists");
}
if (resource instanceof IFile) {
// Copy single file
final IFile targetFile = targetFolder.getFile(new Path(targetName));
if (!targetFile.exists()) {
targetFile.create(new ByteArrayInputStream(new byte[0]), true, nestedMonitor);
}
final Map<QualifiedName, String> props = resource.getPersistentProperties();
if (props != null && !props.isEmpty()) {
for (Map.Entry<QualifiedName, String> prop : props.entrySet()) {
targetFile.setPersistentProperty(prop.getKey(), prop.getValue());
}
}
try (InputStream is = ((IFile) resource).getContents()) {
targetFile.setContents(is, true, true, nestedMonitor);
}
} else if (resource instanceof IFolder) {
// Copy folder with all files and subfolders
}
}
} | plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/actions/navigator/NavigatorHandlerObjectCreateCopy.java | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2015 Serge Rieder ([email protected])
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License (version 2)
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.jkiss.dbeaver.ui.actions.navigator;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.QualifiedName;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.ui.handlers.HandlerUtil;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.core.DBeaverUI;
import org.jkiss.dbeaver.model.navigator.DBNDatabaseNode;
import org.jkiss.dbeaver.model.navigator.DBNNode;
import org.jkiss.dbeaver.model.navigator.DBNResource;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.dnd.TreeNodeTransfer;
import org.jkiss.dbeaver.ui.navigator.NavigatorUtils;
import org.jkiss.dbeaver.utils.RuntimeUtils;
import org.jkiss.utils.CommonUtils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.Map;
public class NavigatorHandlerObjectCreateCopy extends NavigatorHandlerObjectCreateBase {
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
final ISelection selection = HandlerUtil.getCurrentSelection(event);
DBNNode curNode = NavigatorUtils.getSelectedNode(selection);
if (curNode != null) {
Collection<DBNNode> cbNodes = TreeNodeTransfer.getFromClipboard();
if (cbNodes == null) {
UIUtils.showErrorDialog(HandlerUtil.getActiveShell(event), "Paste error", "Clipboard contains data in unsupported format");
return null;
}
for (DBNNode nodeObject : cbNodes) {
if (nodeObject instanceof DBNDatabaseNode) {
createNewObject(HandlerUtil.getActiveWorkbenchWindow(event), curNode, ((DBNDatabaseNode)nodeObject));
} else if (nodeObject instanceof DBNResource && curNode instanceof DBNResource) {
pasteResource((DBNResource)nodeObject, (DBNResource)curNode);
}
}
}
return null;
}
private void pasteResource(DBNResource resourceNode, DBNResource toFolder) {
final IResource resource = resourceNode.getResource();
final IResource targetResource = toFolder.getResource();
assert resource != null;
assert targetResource != null;
final IContainer targetFolder = targetResource instanceof IContainer ? (IContainer) targetResource : targetResource.getParent();
try {
DBeaverUI.runInProgressService(new DBRRunnableWithProgress() {
@Override
public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
try {
copyResource(monitor, resource, targetFolder);
} catch (Exception e) {
throw new InvocationTargetException(e);
}
}
});
} catch (InvocationTargetException e) {
UIUtils.showErrorDialog(null, "Copy error", "Error copying resource", e.getTargetException());
} catch (InterruptedException e) {
// ignore
}
}
private void copyResource(@NotNull DBRProgressMonitor monitor, @NotNull IResource resource, @NotNull IContainer targetFolder) throws CoreException, IOException {
final IProgressMonitor nestedMonitor = RuntimeUtils.getNestedMonitor(monitor);
final String extension = resource.getFileExtension();
String targetName = resource.getName();
if (resource.getParent().equals(targetFolder)) {
String plainName = extension != null && !extension.isEmpty() && targetName.endsWith(extension) ?
targetName.substring(0, targetName.length() - extension.length() - 1) : targetName;
for (int i = 1; ; i++) {
String testName = plainName + "-" + i;
if (!CommonUtils.isEmpty(extension)) {
testName += "." + extension;
}
if (targetFolder.findMember(testName) == null) {
targetName = testName;
break;
}
}
}
if (resource instanceof IFile) {
// Copy single file
final IFile targetFile = targetFolder.getFile(new Path(targetName));
if (!targetFile.exists()) {
targetFile.create(new ByteArrayInputStream(new byte[0]), true, nestedMonitor);
}
final Map<QualifiedName, String> props = resource.getPersistentProperties();
if (props != null && !props.isEmpty()) {
for (Map.Entry<QualifiedName, String> prop : props.entrySet()) {
targetFile.setPersistentProperty(prop.getKey(), prop.getValue());
}
}
try (InputStream is = ((IFile) resource).getContents()) {
targetFile.setContents(is, true, true, nestedMonitor);
}
} else if (resource instanceof IFolder) {
// Copy folder with all files and subfolders
}
}
}
| #150 Resources copy
| plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/actions/navigator/NavigatorHandlerObjectCreateCopy.java | #150 Resources copy |
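Two details of copyResource above are worth calling out. First, the de-duplication loop means that pasting a file into the folder it came from appends a counter to the name: script.sql becomes script-1.sql, or script-2.sql if that name is already taken, until a free name is found. Second, the IFolder branch is an empty stub in this commit, so only single files are actually copied. The fragment below is a rough sketch, not part of the DBeaver commit, of how that branch could be completed with the same Eclipse resources API; the targetSubFolder handle and the recursive call back into copyResource are the sketch's own additions.

    } else if (resource instanceof IFolder) {
        // Sketch: create the target folder handle, then recurse into the source folder's members.
        IFolder targetSubFolder = targetFolder.getFolder(new Path(targetName));
        if (!targetSubFolder.exists()) {
            targetSubFolder.create(true, true, nestedMonitor);
        }
        for (IResource child : ((IFolder) resource).members()) {
            copyResource(monitor, child, targetSubFolder);
        }
    }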
|
Java | apache-2.0 | adcc35750887a98d99382a26fc49aec0d6299941 | 0 | davidwatkins73/waltz-dev,davidwatkins73/waltz-dev,davidwatkins73/waltz-dev,khartec/waltz,khartec/waltz,khartec/waltz,khartec/waltz,davidwatkins73/waltz-dev | package org.finos.waltz.integration_test.inmem.service;
import org.finos.waltz.common.exception.InsufficientPrivelegeException;
import org.finos.waltz.common.exception.NotFoundException;
import org.finos.waltz.integration_test.inmem.BaseInMemoryIntegrationTest;
import org.finos.waltz.integration_test.inmem.helpers.InvolvementHelper;
import org.finos.waltz.model.EntityKind;
import org.finos.waltz.model.EntityReference;
import org.finos.waltz.model.report_grid.*;
import org.finos.waltz.service.report_grid.ReportGridMemberService;
import org.finos.waltz.service.report_grid.ReportGridService;
import org.jooq.DSLContext;
import org.jooq.Record1;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Optional;
import java.util.Set;
import static org.finos.waltz.common.CollectionUtilities.find;
import static org.finos.waltz.common.CollectionUtilities.maybeFirst;
import static org.finos.waltz.common.SetUtilities.asSet;
import static org.finos.waltz.integration_test.inmem.helpers.NameHelper.mkName;
import static org.finos.waltz.schema.Tables.REPORT_GRID_COLUMN_DEFINITION;
import static org.junit.Assert.*;
@Service
public class ReportGridServiceTest extends BaseInMemoryIntegrationTest {
@Autowired
private DSLContext dsl;
@Autowired
private ReportGridService reportGridService;
@Autowired
private ReportGridMemberService reportGridMemberService;
@Autowired
private InvolvementHelper involvementHelper;
@Test
public void canCreateAReportGrid() throws InsufficientPrivelegeException {
ReportGridDefinition def = mkGrid();
assertNotNull("expected a report grid definition is not null", def);
assertTrue("id should be set (positive integer)", def.id().get() > 0);
}
@Test
public void canGetColumnDefinitionsForGrid() throws InsufficientPrivelegeException {
ReportGridDefinition def = mkGrid();
assertEquals(1, def.columnDefinitions().size());
}
@Test
public void cannotRemoveANonExistentReportGrid() throws InsufficientPrivelegeException {
assertThrows(
"Cannot remove a non existent report grid",
NotFoundException.class,
() -> reportGridService.remove(-1, mkName("admin")));
}
@Test
public void cannotRemoveReportGridYouDoNotOwn() throws InsufficientPrivelegeException {
ReportGridDefinition grid = mkGrid();
assertThrows(
"Cannot remove a report grid the user does not own",
InsufficientPrivelegeException.class,
() -> reportGridService.remove(grid.id().get(), mkName("someone_else")));
}
@Test
public void canRemoveGridThatWeDoOwn() throws InsufficientPrivelegeException {
ReportGridDefinition grid = mkGrid();
Set<ReportGridMember> members = reportGridMemberService.findByGridId(grid.id().get());
Optional<ReportGridMember> maybeOwner = maybeFirst(
members,
m -> m.role() == ReportGridMemberRole.OWNER);
String ownerId = maybeOwner
.map(ReportGridMember::userId)
.orElseThrow(() -> new AssertionError("Should have an owner for a newly created grid"));
assertTrue("grid should have been removed", reportGridService.remove(grid.id().get(), ownerId));
assertTrue("members should have been removed", reportGridMemberService.findByGridId(grid.id().get()).isEmpty());
assertFalse("cannot find grid after it's been removed", find(reportGridService.findAll(), g -> g.id().equals(grid.id())).isPresent()); //check it's really gone
assertThrows(
"Cannot remove a report grid we have already removed",
NotFoundException.class,
() -> reportGridService.remove(grid.id().get(), ownerId));
Record1<Integer> count = dsl
.selectCount()
.from(REPORT_GRID_COLUMN_DEFINITION)
.where(REPORT_GRID_COLUMN_DEFINITION.REPORT_GRID_ID.eq(grid.id().get()))
.fetchOne();
assertEquals(Integer.valueOf(0), count.value1());
}
// -- HELPERS --------------
private ReportGridDefinition mkGrid() throws InsufficientPrivelegeException {
ReportGridCreateCommand cmd = ImmutableReportGridCreateCommand.builder()
.name(mkName("testReport"))
.build();
String admin = mkName("admin");
ReportGridDefinition def = reportGridService.create(cmd, admin);
long invKind = involvementHelper.mkInvolvementKind(mkName("dummyInv"));
ReportGridColumnDefinition colDef = ImmutableReportGridColumnDefinition
.builder()
.columnEntityReference(EntityReference.mkRef(EntityKind.INVOLVEMENT_KIND, invKind))
.position(10)
.build();
ReportGridColumnDefinitionsUpdateCommand colCmd = ImmutableReportGridColumnDefinitionsUpdateCommand
.builder()
.columnDefinitions(asSet(colDef))
.build();
return reportGridService.updateColumnDefinitions(
def.id().get(),
colCmd,
admin);
}
}
| waltz-integration-test/src/test/java/org/finos/waltz/integration_test/inmem/service/ReportGridServiceTest.java |
package org.finos.waltz.integration_test.inmem.service;
import org.finos.waltz.common.exception.InsufficientPrivelegeException;
import org.finos.waltz.common.exception.NotFoundException;
import org.finos.waltz.integration_test.inmem.BaseInMemoryIntegrationTest;
import org.finos.waltz.integration_test.inmem.helpers.InvolvementHelper;
import org.finos.waltz.model.EntityKind;
import org.finos.waltz.model.EntityReference;
import org.finos.waltz.model.report_grid.*;
import org.finos.waltz.service.report_grid.ReportGridMemberService;
import org.finos.waltz.service.report_grid.ReportGridService;
import org.jooq.DSLContext;
import org.jooq.Record1;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Optional;
import java.util.Set;
import static org.finos.waltz.common.CollectionUtilities.find;
import static org.finos.waltz.common.CollectionUtilities.maybeFirst;
import static org.finos.waltz.common.SetUtilities.asSet;
import static org.finos.waltz.integration_test.inmem.helpers.NameHelper.mkName;
import static org.finos.waltz.schema.Tables.REPORT_GRID_COLUMN_DEFINITION;
import static org.junit.Assert.*;
@Service
public class ReportGridServiceTest extends BaseInMemoryIntegrationTest {
@Autowired
private DSLContext dsl;
@Autowired
private ReportGridService reportGridService;
@Autowired
private ReportGridMemberService reportGridMemberService;
@Autowired
private InvolvementHelper involvementHelper;
@Test
public void canCreateAReportGrid() throws InsufficientPrivelegeException {
ReportGridDefinition def = mkGrid();
assertNotNull("expected a report grid definition is not null", def);
assertTrue("id should be set (positive integer)", def.id().get() > 0);
}
@Test
public void canGetColumnDefinitionsForGrid() throws InsufficientPrivelegeException {
ReportGridDefinition def = mkGrid();
assertEquals(1, def.columnDefinitions().size());
}
@Test
public void cannotRemoveANonExistentReportGrid() throws InsufficientPrivelegeException {
assertThrows(
"Cannot remove a non existent report grid",
NotFoundException.class,
() -> reportGridService.remove(-1, mkName("admin")));
}
@Test
public void cannotRemoveReportGridYouDoNotOwn() throws InsufficientPrivelegeException {
ReportGridDefinition grid = mkGrid();
assertThrows(
"Cannot remove a report grid the user does not own",
InsufficientPrivelegeException.class,
() -> reportGridService.remove(grid.id().get(), mkName("someone_else")));
}
@Test
public void canRemoveGridThatWeDoOwn() throws InsufficientPrivelegeException {
ReportGridDefinition grid = mkGrid();
Set<ReportGridMember> members = reportGridMemberService.findByGridId(grid.id().get());
Optional<ReportGridMember> maybeOwner = maybeFirst(
members,
m -> m.role() == ReportGridMemberRole.OWNER);
String ownerId = maybeOwner
.map(ReportGridMember::userId)
.orElseThrow(() -> new AssertionError("Should have an owner for a newly created grid"));
assertTrue("grid should have been removed", reportGridService.remove(grid.id().get(), ownerId));
assertTrue("members should have been removed", reportGridMemberService.findByGridId(grid.id().get()).isEmpty());
assertFalse("cannot find grid after it's been removed", find(reportGridService.findAll(), g -> g.id().equals(grid.id())).isPresent()); //check it's really gone
assertThrows(
"Cannot remove a report grid we have already removed",
NotFoundException.class,
() -> reportGridService.remove(grid.id().get(), ownerId));
Record1<Integer> count = dsl
.selectCount()
.from(REPORT_GRID_COLUMN_DEFINITION)
.where(REPORT_GRID_COLUMN_DEFINITION.REPORT_GRID_ID.eq(grid.id().get()))
.fetchOne();
assertEquals(Integer.valueOf(0), count.value1());
}
// -- HELPERS --------------
private ReportGridDefinition mkGrid() throws InsufficientPrivelegeException {
ReportGridCreateCommand cmd = ImmutableReportGridCreateCommand.builder()
.name(mkName("testReport"))
.externalId(mkName("extId"))
.build();
String admin = mkName("admin");
ReportGridDefinition def = reportGridService.create(cmd, admin);
long invKind = involvementHelper.mkInvolvementKind(mkName("dummyInv"));
ReportGridColumnDefinition colDef = ImmutableReportGridColumnDefinition
.builder()
.columnEntityReference(EntityReference.mkRef(EntityKind.INVOLVEMENT_KIND, invKind))
.position(10)
.build();
ReportGridColumnDefinitionsUpdateCommand colCmd = ImmutableReportGridColumnDefinitionsUpdateCommand
.builder()
.columnDefinitions(asSet(colDef))
.build();
return reportGridService.updateColumnDefinitions(
def.id().get(),
colCmd,
admin);
}
}
| Fixing integration test
#CTCTOWALTZ-2410
#5862
| waltz-integration-test/src/test/java/org/finos/waltz/integration_test/inmem/service/ReportGridServiceTest.java | Fixing integration test |
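The two copies of ReportGridServiceTest in this row differ only inside the mkGrid() helper: the earlier version of the test (the second copy) also set an external id on the grid create command, and the fix removes that call. The dropped fragment, as it appears in the older copy:

    ReportGridCreateCommand cmd = ImmutableReportGridCreateCommand.builder()
            .name(mkName("testReport"))
            .externalId(mkName("extId"))   // removed by this commit
            .build();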
|
Java | bsd-2-clause | 4d35174d97b83b53c57a38018ccbef88588ddb68 | 0 | l2-/runelite,runelite/runelite,runelite/runelite,abelbriggs1/runelite,abelbriggs1/runelite,l2-/runelite,abelbriggs1/runelite,runelite/runelite,Sethtroll/runelite,Sethtroll/runelite | /*
* Copyright (c) 2017. l2-
* Copyright (c) 2017, Adam <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.chatcommands;
import com.google.common.eventbus.Subscribe;
import com.google.inject.Provides;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.inject.Inject;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import net.runelite.api.ChatMessageType;
import net.runelite.api.Client;
import net.runelite.api.Experience;
import net.runelite.api.GameState;
import net.runelite.api.ItemComposition;
import net.runelite.api.MessageNode;
import net.runelite.api.events.ChatMessage;
import net.runelite.api.events.GameTick;
import net.runelite.api.events.SetMessage;
import net.runelite.api.events.WidgetLoaded;
import net.runelite.api.vars.AccountType;
import net.runelite.api.widgets.Widget;
import static net.runelite.api.widgets.WidgetID.KILL_LOGS_GROUP_ID;
import net.runelite.api.widgets.WidgetInfo;
import net.runelite.client.chat.ChatColorType;
import net.runelite.client.chat.ChatMessageBuilder;
import net.runelite.client.chat.ChatMessageManager;
import net.runelite.client.chat.ChatboxInputListener;
import net.runelite.client.chat.CommandManager;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.events.ChatboxInput;
import net.runelite.client.events.PrivateMessageInput;
import net.runelite.client.game.ItemManager;
import net.runelite.client.input.KeyManager;
import net.runelite.client.plugins.Plugin;
import net.runelite.client.plugins.PluginDescriptor;
import net.runelite.client.util.StackFormatter;
import net.runelite.http.api.hiscore.HiscoreClient;
import net.runelite.http.api.hiscore.HiscoreEndpoint;
import net.runelite.http.api.hiscore.HiscoreResult;
import net.runelite.http.api.hiscore.HiscoreSkill;
import net.runelite.http.api.hiscore.SingleHiscoreSkillResult;
import net.runelite.http.api.hiscore.Skill;
import net.runelite.http.api.item.Item;
import net.runelite.http.api.item.ItemPrice;
import net.runelite.http.api.item.SearchResult;
import net.runelite.http.api.kc.KillCountClient;
@PluginDescriptor(
name = "Chat Commands",
description = "Enable chat commands",
tags = {"grand", "exchange", "level", "prices"}
)
@Slf4j
public class ChatCommandsPlugin extends Plugin implements ChatboxInputListener
{
private static final float HIGH_ALCHEMY_CONSTANT = 0.6f;
private static final Pattern KILLCOUNT_PATERN = Pattern.compile("Your (.+) kill count is: <col=ff0000>(\\d+)</col>.");
private static final Pattern RAIDS_PATTERN = Pattern.compile("Your completed (.+) count is: <col=ff0000>(\\d+)</col>.");
private static final Pattern WINTERTODT_PATERN = Pattern.compile("Your subdued Wintertodt count is: <col=ff0000>(\\d+)</col>.");
private static final Pattern BARROWS_PATERN = Pattern.compile("Your Barrows chest count is: <col=ff0000>(\\d+)</col>.");
private static final String TOTAL_LEVEL_COMMAND_STRING = "!total";
private static final String PRICE_COMMAND_STRING = "!price";
private static final String LEVEL_COMMAND_STRING = "!lvl";
private static final String CLUES_COMMAND_STRING = "!clues";
private static final String KILLCOUNT_COMMAND_STRING = "!kc";
private static final String CMB_COMMAND_STRING = "!cmb";
private final HiscoreClient hiscoreClient = new HiscoreClient();
private final KillCountClient killCountClient = new KillCountClient();
private boolean logKills;
@Inject
private Client client;
@Inject
private ChatCommandsConfig config;
@Inject
private ConfigManager configManager;
@Inject
private ItemManager itemManager;
@Inject
private ChatMessageManager chatMessageManager;
@Inject
private ScheduledExecutorService executor;
@Inject
private KeyManager keyManager;
@Inject
private ChatKeyboardListener chatKeyboardListener;
@Inject
private CommandManager commandManager;
@Override
public void startUp()
{
keyManager.registerKeyListener(chatKeyboardListener);
commandManager.register(this);
}
@Override
public void shutDown()
{
keyManager.unregisterKeyListener(chatKeyboardListener);
commandManager.unregister(this);
}
@Provides
ChatCommandsConfig provideConfig(ConfigManager configManager)
{
return configManager.getConfig(ChatCommandsConfig.class);
}
private void setKc(String boss, int killcount)
{
configManager.setConfiguration("killcount." + client.getUsername().toLowerCase(),
boss.toLowerCase(), killcount);
}
private int getKc(String boss)
{
Integer killCount = configManager.getConfiguration("killcount." + client.getUsername().toLowerCase(),
boss.toLowerCase(), int.class);
return killCount == null ? 0 : killCount;
}
/**
* Checks if the chat message is a command.
*
* @param setMessage The chat message.
*/
@Subscribe
public void onSetMessage(SetMessage setMessage)
{
if (client.getGameState() != GameState.LOGGED_IN)
{
return;
}
switch (setMessage.getType())
{
case PUBLIC:
case PUBLIC_MOD:
case CLANCHAT:
case PRIVATE_MESSAGE_RECEIVED:
case PRIVATE_MESSAGE_SENT:
break;
default:
return;
}
String message = setMessage.getValue();
MessageNode messageNode = setMessage.getMessageNode();
// clear RuneLite formatted message as the message node is
// being reused
messageNode.setRuneLiteFormatMessage(null);
if (config.lvl() && message.toLowerCase().equals(TOTAL_LEVEL_COMMAND_STRING))
{
log.debug("Running total level lookup");
executor.submit(() -> playerSkillLookup(setMessage, "total"));
}
else if (config.lvl() && message.toLowerCase().equals(CMB_COMMAND_STRING))
{
log.debug("Running combat level lookup");
executor.submit(() -> combatLevelLookup(setMessage.getType(), setMessage));
}
else if (config.price() && message.toLowerCase().startsWith(PRICE_COMMAND_STRING + " "))
{
String search = message.substring(PRICE_COMMAND_STRING.length() + 1);
log.debug("Running price lookup for {}", search);
executor.submit(() -> itemPriceLookup(setMessage.getMessageNode(), search));
}
else if (config.lvl() && message.toLowerCase().startsWith(LEVEL_COMMAND_STRING + " "))
{
String search = message.substring(LEVEL_COMMAND_STRING.length() + 1);
log.debug("Running level lookup for {}", search);
executor.submit(() -> playerSkillLookup(setMessage, search));
}
else if (config.clue() && message.toLowerCase().equals(CLUES_COMMAND_STRING))
{
log.debug("Running lookup for overall clues");
executor.submit(() -> playerClueLookup(setMessage, "total"));
}
else if (config.clue() && message.toLowerCase().startsWith(CLUES_COMMAND_STRING + " "))
{
String search = message.substring(CLUES_COMMAND_STRING.length() + 1);
log.debug("Running clue lookup for {}", search);
executor.submit(() -> playerClueLookup(setMessage, search));
}
else if (config.killcount() && message.toLowerCase().startsWith(KILLCOUNT_COMMAND_STRING + " "))
{
String search = message.substring(KILLCOUNT_COMMAND_STRING.length() + 1);
log.debug("Running killcount lookup for {}", search);
executor.submit(() -> killCountLookup(setMessage.getType(), setMessage, search));
}
}
@Subscribe
public void onChatMessage(ChatMessage chatMessage)
{
if (chatMessage.getType() != ChatMessageType.SERVER && chatMessage.getType() != ChatMessageType.FILTERED)
{
return;
}
String message = chatMessage.getMessage();
Matcher matcher = KILLCOUNT_PATERN.matcher(message);
if (matcher.find())
{
String boss = matcher.group(1);
int kc = Integer.parseInt(matcher.group(2));
setKc(boss, kc);
}
matcher = WINTERTODT_PATERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1));
setKc("Wintertodt", kc);
}
matcher = RAIDS_PATTERN.matcher(message);
if (matcher.find())
{
String boss = matcher.group(1);
int kc = Integer.parseInt(matcher.group(2));
setKc(boss, kc);
}
matcher = BARROWS_PATERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1));
setKc("Barrows", kc);
}
}
@Subscribe
public void onGameTick(GameTick event)
{
if (!logKills)
{
return;
}
logKills = false;
Widget title = client.getWidget(WidgetInfo.KILL_LOG_TITLE);
Widget bossMonster = client.getWidget(WidgetInfo.KILL_LOG_MONSTER);
Widget bossKills = client.getWidget(WidgetInfo.KILL_LOG_KILLS);
if (title == null || bossMonster == null || bossKills == null
|| !"Boss Kill Log".equals(title.getText()))
{
return;
}
Widget[] bossChildren = bossMonster.getChildren();
Widget[] killsChildren = bossKills.getChildren();
for (int i = 0; i < bossChildren.length; ++i)
{
Widget boss = bossChildren[i];
Widget kill = killsChildren[i];
String bossName = boss.getText();
int kc = Integer.parseInt(kill.getText().replace(",", ""));
if (kc != getKc(bossName))
{
setKc(bossName, kc);
}
}
}
@Subscribe
public void onWidgetLoaded(WidgetLoaded widget)
{
// don't load kc if in an instance, e.g. if the player is in another player's POH
// and reading their boss log
if (widget.getGroupId() != KILL_LOGS_GROUP_ID || client.isInInstancedRegion())
{
return;
}
logKills = true;
}
@Override
public boolean onChatboxInput(ChatboxInput chatboxInput)
{
final String value = chatboxInput.getValue();
if (!config.killcount() || !value.startsWith("!kc ") && !value.startsWith("/!kc "))
{
return false;
}
int idx = value.indexOf(' ');
final String boss = longBossName(value.substring(idx + 1));
final int kc = getKc(boss);
if (kc <= 0)
{
return false;
}
final String playerName = client.getLocalPlayer().getName();
executor.execute(() ->
{
try
{
killCountClient.submit(playerName, boss, kc);
}
catch (Exception ex)
{
log.warn("unable to submit killcount", ex);
}
finally
{
chatboxInput.resume();
}
});
return true;
}
@Override
public boolean onPrivateMessageInput(PrivateMessageInput privateMessageInput)
{
final String message = privateMessageInput.getMessage();
if (!config.killcount() || !message.startsWith("!kc "))
{
return false;
}
int idx = message.indexOf(' ');
final String boss = longBossName(message.substring(idx + 1));
final int kc = getKc(boss);
if (kc <= 0)
{
return false;
}
final String playerName = client.getLocalPlayer().getName();
executor.execute(() ->
{
try
{
killCountClient.submit(playerName, boss, kc);
}
catch (Exception ex)
{
log.warn("unable to submit killcount", ex);
}
finally
{
privateMessageInput.resume();
}
});
return true;
}
private void killCountLookup(ChatMessageType type, SetMessage setMessage, String search)
{
final String player;
if (type.equals(ChatMessageType.PRIVATE_MESSAGE_SENT))
{
player = client.getLocalPlayer().getName();
}
else
{
player = sanitize(setMessage.getName());
}
search = longBossName(search);
final int kc;
try
{
kc = killCountClient.get(player, search);
}
catch (IOException ex)
{
log.debug("unable to lookup killcount", ex);
return;
}
String response = new ChatMessageBuilder()
.append(ChatColorType.HIGHLIGHT)
.append(search)
.append(ChatColorType.NORMAL)
.append(" kill count: ")
.append(ChatColorType.HIGHLIGHT)
.append(Integer.toString(kc))
.build();
log.debug("Setting response {}", response);
final MessageNode messageNode = setMessage.getMessageNode();
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
/**
* Looks up the item price and changes the original message to the
* response.
*
* @param messageNode The chat message containing the command.
* @param search The item given with the command.
*/
private void itemPriceLookup(MessageNode messageNode, String search)
{
SearchResult result;
try
{
result = itemManager.searchForItem(search);
}
catch (ExecutionException ex)
{
log.warn("Unable to search for item {}", search, ex);
return;
}
if (result != null && !result.getItems().isEmpty())
{
Item item = retrieveFromList(result.getItems(), search);
if (item == null)
{
log.debug("Unable to find item {} in result {}", search, result);
return;
}
int itemId = item.getId();
ItemPrice itemPrice = itemManager.getItemPrice(itemId);
final ChatMessageBuilder builder = new ChatMessageBuilder()
.append(ChatColorType.NORMAL)
.append("Price of ")
.append(ChatColorType.HIGHLIGHT)
.append(item.getName())
.append(ChatColorType.NORMAL)
.append(": GE average ")
.append(ChatColorType.HIGHLIGHT)
.append(StackFormatter.formatNumber(itemPrice.getPrice()));
ItemComposition itemComposition = itemManager.getItemComposition(itemId);
if (itemComposition != null)
{
int alchPrice = Math.round(itemComposition.getPrice() * HIGH_ALCHEMY_CONSTANT);
builder
.append(ChatColorType.NORMAL)
.append(" HA value ")
.append(ChatColorType.HIGHLIGHT)
.append(StackFormatter.formatNumber(alchPrice));
}
String response = builder.build();
log.debug("Setting response {}", response);
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
}
/**
* Looks up the player skill and changes the original message to the
* response.
*
* @param setMessage The chat message containing the command.
* @param search The skill given with the command.
*/
private void playerSkillLookup(SetMessage setMessage, String search)
{
search = SkillAbbreviations.getFullName(search);
final HiscoreSkill skill;
try
{
skill = HiscoreSkill.valueOf(search.toUpperCase());
}
catch (IllegalArgumentException i)
{
return;
}
final HiscoreLookup lookup = getCorrectLookupFor(setMessage);
try
{
final SingleHiscoreSkillResult result = hiscoreClient.lookup(lookup.getName(), skill, lookup.getEndpoint());
final Skill hiscoreSkill = result.getSkill();
final String response = new ChatMessageBuilder()
.append(ChatColorType.NORMAL)
.append("Level ")
.append(ChatColorType.HIGHLIGHT)
.append(skill.getName()).append(": ").append(String.valueOf(hiscoreSkill.getLevel()))
.append(ChatColorType.NORMAL)
.append(" Experience: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.format("%,d", hiscoreSkill.getExperience()))
.append(ChatColorType.NORMAL)
.append(" Rank: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.format("%,d", hiscoreSkill.getRank()))
.build();
log.debug("Setting response {}", response);
final MessageNode messageNode = setMessage.getMessageNode();
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
catch (IOException ex)
{
log.warn("unable to look up skill {} for {}", skill, search, ex);
}
}
private void combatLevelLookup(ChatMessageType type, SetMessage setMessage)
{
String player;
if (type == ChatMessageType.PRIVATE_MESSAGE_SENT)
{
player = client.getLocalPlayer().getName();
}
else
{
player = sanitize(setMessage.getName());
}
try
{
HiscoreResult playerStats = hiscoreClient.lookup(player);
int attack = playerStats.getAttack().getLevel();
int strength = playerStats.getStrength().getLevel();
int defence = playerStats.getDefence().getLevel();
int hitpoints = playerStats.getHitpoints().getLevel();
int ranged = playerStats.getRanged().getLevel();
int prayer = playerStats.getPrayer().getLevel();
int magic = playerStats.getMagic().getLevel();
int combatLevel = Experience.getCombatLevel(attack, strength, defence, hitpoints, magic, ranged, prayer);
String response = new ChatMessageBuilder()
.append(ChatColorType.NORMAL)
.append("Combat Level: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.valueOf(combatLevel))
.append(ChatColorType.NORMAL)
.append(" A: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.valueOf(attack))
.append(ChatColorType.NORMAL)
.append(" S: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.valueOf(strength))
.append(ChatColorType.NORMAL)
.append(" D: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.valueOf(defence))
.append(ChatColorType.NORMAL)
.append(" H: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.valueOf(hitpoints))
.append(ChatColorType.NORMAL)
.append(" R: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.valueOf(ranged))
.append(ChatColorType.NORMAL)
.append(" P: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.valueOf(prayer))
.append(ChatColorType.NORMAL)
.append(" M: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.valueOf(magic))
.build();
log.debug("Setting response {}", response);
final MessageNode messageNode = setMessage.getMessageNode();
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
catch (IOException ex)
{
log.warn("Error fetching hiscore data", ex);
}
}
/**
* Looks up the quantities of clues completed
* for the requested clue-level (no arg if requesting total)
* easy, medium, hard, elite, master
*/
private void playerClueLookup(SetMessage setMessage, String search)
{
final HiscoreLookup lookup = getCorrectLookupFor(setMessage);
try
{
final Skill hiscoreSkill;
final HiscoreResult result = hiscoreClient.lookup(lookup.getName(), lookup.getEndpoint());
String level = search.toLowerCase();
switch (level)
{
case "easy":
hiscoreSkill = result.getClueScrollEasy();
break;
case "medium":
hiscoreSkill = result.getClueScrollMedium();
break;
case "hard":
hiscoreSkill = result.getClueScrollHard();
break;
case "elite":
hiscoreSkill = result.getClueScrollElite();
break;
case "master":
hiscoreSkill = result.getClueScrollMaster();
break;
case "total":
hiscoreSkill = result.getClueScrollAll();
break;
default:
return;
}
int quantity = hiscoreSkill.getLevel();
int rank = hiscoreSkill.getRank();
if (quantity == -1)
{
return;
}
ChatMessageBuilder chatMessageBuilder = new ChatMessageBuilder()
.append("Clue scroll (" + level + ")").append(": ")
.append(ChatColorType.HIGHLIGHT)
.append(Integer.toString(quantity));
if (rank != -1)
{
chatMessageBuilder.append(ChatColorType.NORMAL)
.append(" Rank: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.format("%,d", rank));
}
String response = chatMessageBuilder.build();
log.debug("Setting response {}", response);
final MessageNode messageNode = setMessage.getMessageNode();
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
catch (IOException ex)
{
log.warn("error looking up clues", ex);
}
}
/**
* Gets correct lookup data for message
*
* @param setMessage chat message
* @return hiscore lookup data
*/
private HiscoreLookup getCorrectLookupFor(final SetMessage setMessage)
{
final String player;
final HiscoreEndpoint ironmanStatus;
if (setMessage.getType().equals(ChatMessageType.PRIVATE_MESSAGE_SENT))
{
player = client.getLocalPlayer().getName();
ironmanStatus = getHiscoreEndpointType();
}
else
{
player = sanitize(setMessage.getName());
if (player.equals(client.getLocalPlayer().getName()))
{
// Get ironman status for the local player
ironmanStatus = getHiscoreEndpointType();
}
else
{
// Get ironman status from their icon in chat
ironmanStatus = getHiscoreEndpointByName(setMessage.getName());
}
}
return new HiscoreLookup(player, ironmanStatus);
}
/**
* Compares the names of the items in the list with the original input.
* Returns the item if its name is equal to the original input or null
* if it can't find the item.
*
* @param items List of items.
* @param originalInput String with the original input.
* @return Item which has a name equal to the original input.
*/
private Item retrieveFromList(List<Item> items, String originalInput)
{
for (Item item : items)
{
if (item.getName().toLowerCase().equals(originalInput.toLowerCase()))
{
return item;
}
}
return null;
}
/**
* Cleans the ironman status icon from playername string if present and
* corrects spaces.
*
* @param lookup Playername to lookup.
* @return Cleaned playername.
*/
private static String sanitize(String lookup)
{
String cleaned = lookup.contains("<img") ? lookup.substring(lookup.lastIndexOf('>') + 1) : lookup;
return cleaned.replace('\u00A0', ' ');
}
/**
* Looks up the ironman status of the local player. Does NOT work on other players.
*
* @return hiscore endpoint
*/
private HiscoreEndpoint getHiscoreEndpointType()
{
return toEndPoint(client.getAccountType());
}
/**
* Returns the ironman status based on the symbol in the name of the player.
*
* @param name player name
* @return hiscore endpoint
*/
private static HiscoreEndpoint getHiscoreEndpointByName(final String name)
{
if (name.contains("<img=2>"))
{
return toEndPoint(AccountType.IRONMAN);
}
else if (name.contains("<img=3>"))
{
return toEndPoint(AccountType.ULTIMATE_IRONMAN);
}
else if (name.contains("<img=10>"))
{
return toEndPoint(AccountType.HARDCORE_IRONMAN);
}
else
{
return toEndPoint(AccountType.NORMAL);
}
}
/**
* Converts account type to hiscore endpoint
*
* @param accountType account type
* @return hiscore endpoint
*/
private static HiscoreEndpoint toEndPoint(final AccountType accountType)
{
switch (accountType)
{
case IRONMAN:
return HiscoreEndpoint.IRONMAN;
case ULTIMATE_IRONMAN:
return HiscoreEndpoint.ULTIMATE_IRONMAN;
case HARDCORE_IRONMAN:
return HiscoreEndpoint.HARDCORE_IRONMAN;
default:
return HiscoreEndpoint.NORMAL;
}
}
@Value
private static class HiscoreLookup
{
private final String name;
private final HiscoreEndpoint endpoint;
}
private static String longBossName(String boss)
{
switch (boss.toLowerCase())
{
case "corp":
return "Corporeal Beast";
case "jad":
return "TzTok-Jad";
case "kq":
return "Kalphite Queen";
case "chaos ele":
return "Chaos Elemental";
case "dusk":
case "dawn":
case "gargs":
return "Grotesque Guardians";
case "crazy arch":
return "Crazy Archaeologist";
case "deranged arch":
return "Deranged Archaeologist";
case "mole":
return "Giant Mole";
case "vetion":
return "Vet'ion";
case "vene":
return "Venenatis";
case "kbd":
return "King Black Dragon";
case "vork":
return "Vorkath";
case "sire":
return "Abyssal Sire";
case "smoke devil":
case "thermy":
return "Thermonuclear Smoke Devil";
case "cerb":
return "Cerberus";
case "zuk":
case "inferno":
return "TzKal-Zuk";
// gwd
case "sara":
case "saradomin":
case "zilyana":
case "zily":
return "Commander Zilyana";
case "zammy":
case "zamorak":
case "kril":
case "kril trutsaroth":
return "K'ril Tsutsaroth";
case "arma":
case "kree":
case "kreearra":
case "armadyl":
return "Kree'arra";
case "bando":
case "bandos":
case "graardor":
return "General Graardor";
// dks
case "supreme":
return "Dagannoth Supreme";
case "rex":
return "Dagannoth Rex";
case "prime":
return "Dagannoth Prime";
case "wt":
return "Wintertodt";
case "barrows":
return "Barrows Chests";
case "cox":
case "xeric":
case "chambers":
case "olm":
case "raids":
return "Chambers of Xeric";
case "tob":
case "theatre":
case "verzik":
case "verzik vitur":
case "raids 2":
return "Theatre of Blood";
default:
return boss;
}
}
}
}
| runelite-client/src/main/java/net/runelite/client/plugins/chatcommands/ChatCommandsPlugin.java |
/*
* Copyright (c) 2017. l2-
* Copyright (c) 2017, Adam <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.chatcommands;
import com.google.common.eventbus.Subscribe;
import com.google.inject.Provides;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.inject.Inject;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import net.runelite.api.ChatMessageType;
import net.runelite.api.Client;
import net.runelite.api.GameState;
import net.runelite.api.ItemComposition;
import net.runelite.api.MessageNode;
import net.runelite.api.events.ChatMessage;
import net.runelite.api.events.GameTick;
import net.runelite.api.events.SetMessage;
import net.runelite.api.events.WidgetLoaded;
import net.runelite.api.vars.AccountType;
import net.runelite.api.widgets.Widget;
import static net.runelite.api.widgets.WidgetID.KILL_LOGS_GROUP_ID;
import net.runelite.api.widgets.WidgetInfo;
import net.runelite.client.chat.ChatColorType;
import net.runelite.client.chat.ChatMessageBuilder;
import net.runelite.client.chat.ChatMessageManager;
import net.runelite.client.chat.ChatboxInputListener;
import net.runelite.client.chat.CommandManager;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.events.ChatboxInput;
import net.runelite.client.events.PrivateMessageInput;
import net.runelite.client.game.ItemManager;
import net.runelite.client.input.KeyManager;
import net.runelite.client.plugins.Plugin;
import net.runelite.client.plugins.PluginDescriptor;
import net.runelite.client.util.StackFormatter;
import net.runelite.http.api.hiscore.HiscoreClient;
import net.runelite.http.api.hiscore.HiscoreEndpoint;
import net.runelite.http.api.hiscore.HiscoreResult;
import net.runelite.http.api.hiscore.HiscoreSkill;
import net.runelite.http.api.hiscore.SingleHiscoreSkillResult;
import net.runelite.http.api.hiscore.Skill;
import net.runelite.http.api.item.Item;
import net.runelite.http.api.item.ItemPrice;
import net.runelite.http.api.item.SearchResult;
import net.runelite.http.api.kc.KillCountClient;
@PluginDescriptor(
name = "Chat Commands",
description = "Enable chat commands",
tags = {"grand", "exchange", "level", "prices"}
)
@Slf4j
public class ChatCommandsPlugin extends Plugin implements ChatboxInputListener
{
private static final float HIGH_ALCHEMY_CONSTANT = 0.6f;
private static final Pattern KILLCOUNT_PATERN = Pattern.compile("Your (.+) kill count is: <col=ff0000>(\\d+)</col>.");
private static final Pattern RAIDS_PATTERN = Pattern.compile("Your completed (.+) count is: <col=ff0000>(\\d+)</col>.");
private static final Pattern WINTERTODT_PATERN = Pattern.compile("Your subdued Wintertodt count is: <col=ff0000>(\\d+)</col>.");
private static final Pattern BARROWS_PATERN = Pattern.compile("Your Barrows chest count is: <col=ff0000>(\\d+)</col>.");
private static final String TOTAL_LEVEL_COMMAND_STRING = "!total";
private static final String PRICE_COMMAND_STRING = "!price";
private static final String LEVEL_COMMAND_STRING = "!lvl";
private static final String CLUES_COMMAND_STRING = "!clues";
private static final String KILLCOUNT_COMMAND_STRING = "!kc";
private final HiscoreClient hiscoreClient = new HiscoreClient();
private final KillCountClient killCountClient = new KillCountClient();
private boolean logKills;
@Inject
private Client client;
@Inject
private ChatCommandsConfig config;
@Inject
private ConfigManager configManager;
@Inject
private ItemManager itemManager;
@Inject
private ChatMessageManager chatMessageManager;
@Inject
private ScheduledExecutorService executor;
@Inject
private KeyManager keyManager;
@Inject
private ChatKeyboardListener chatKeyboardListener;
@Inject
private CommandManager commandManager;
@Override
public void startUp()
{
keyManager.registerKeyListener(chatKeyboardListener);
commandManager.register(this);
}
@Override
public void shutDown()
{
keyManager.unregisterKeyListener(chatKeyboardListener);
commandManager.unregister(this);
}
@Provides
ChatCommandsConfig provideConfig(ConfigManager configManager)
{
return configManager.getConfig(ChatCommandsConfig.class);
}
private void setKc(String boss, int killcount)
{
configManager.setConfiguration("killcount." + client.getUsername().toLowerCase(),
boss.toLowerCase(), killcount);
}
private int getKc(String boss)
{
Integer killCount = configManager.getConfiguration("killcount." + client.getUsername().toLowerCase(),
boss.toLowerCase(), int.class);
return killCount == null ? 0 : killCount;
}
/**
* Checks if the chat message is a command.
*
* @param setMessage The chat message.
*/
@Subscribe
public void onSetMessage(SetMessage setMessage)
{
if (client.getGameState() != GameState.LOGGED_IN)
{
return;
}
switch (setMessage.getType())
{
case PUBLIC:
case PUBLIC_MOD:
case CLANCHAT:
case PRIVATE_MESSAGE_RECEIVED:
case PRIVATE_MESSAGE_SENT:
break;
default:
return;
}
String message = setMessage.getValue();
MessageNode messageNode = setMessage.getMessageNode();
// clear RuneLite formatted message as the message node is
// being reused
messageNode.setRuneLiteFormatMessage(null);
if (config.lvl() && message.toLowerCase().equals(TOTAL_LEVEL_COMMAND_STRING))
{
log.debug("Running total level lookup");
executor.submit(() -> playerSkillLookup(setMessage, "total"));
}
else if (config.price() && message.toLowerCase().startsWith(PRICE_COMMAND_STRING + " "))
{
String search = message.substring(PRICE_COMMAND_STRING.length() + 1);
log.debug("Running price lookup for {}", search);
executor.submit(() -> itemPriceLookup(setMessage.getMessageNode(), search));
}
else if (config.lvl() && message.toLowerCase().startsWith(LEVEL_COMMAND_STRING + " "))
{
String search = message.substring(LEVEL_COMMAND_STRING.length() + 1);
log.debug("Running level lookup for {}", search);
executor.submit(() -> playerSkillLookup(setMessage, search));
}
else if (config.clue() && message.toLowerCase().equals(CLUES_COMMAND_STRING))
{
log.debug("Running lookup for overall clues");
executor.submit(() -> playerClueLookup(setMessage, "total"));
}
else if (config.clue() && message.toLowerCase().startsWith(CLUES_COMMAND_STRING + " "))
{
String search = message.substring(CLUES_COMMAND_STRING.length() + 1);
log.debug("Running clue lookup for {}", search);
executor.submit(() -> playerClueLookup(setMessage, search));
}
else if (config.killcount() && message.toLowerCase().startsWith(KILLCOUNT_COMMAND_STRING + " "))
{
String search = message.substring(KILLCOUNT_COMMAND_STRING.length() + 1);
log.debug("Running killcount lookup for {}", search);
executor.submit(() -> killCountLookup(setMessage.getType(), setMessage, search));
}
}
@Subscribe
public void onChatMessage(ChatMessage chatMessage)
{
if (chatMessage.getType() != ChatMessageType.SERVER && chatMessage.getType() != ChatMessageType.FILTERED)
{
return;
}
String message = chatMessage.getMessage();
Matcher matcher = KILLCOUNT_PATERN.matcher(message);
if (matcher.find())
{
String boss = matcher.group(1);
int kc = Integer.parseInt(matcher.group(2));
setKc(boss, kc);
}
matcher = WINTERTODT_PATERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1));
setKc("Wintertodt", kc);
}
matcher = RAIDS_PATTERN.matcher(message);
if (matcher.find())
{
String boss = matcher.group(1);
int kc = Integer.parseInt(matcher.group(2));
setKc(boss, kc);
}
matcher = BARROWS_PATERN.matcher(message);
if (matcher.find())
{
int kc = Integer.parseInt(matcher.group(1));
setKc("Barrows", kc);
}
}
@Subscribe
public void onGameTick(GameTick event)
{
if (!logKills)
{
return;
}
logKills = false;
Widget title = client.getWidget(WidgetInfo.KILL_LOG_TITLE);
Widget bossMonster = client.getWidget(WidgetInfo.KILL_LOG_MONSTER);
Widget bossKills = client.getWidget(WidgetInfo.KILL_LOG_KILLS);
if (title == null || bossMonster == null || bossKills == null
|| !"Boss Kill Log".equals(title.getText()))
{
return;
}
Widget[] bossChildren = bossMonster.getChildren();
Widget[] killsChildren = bossKills.getChildren();
for (int i = 0; i < bossChildren.length; ++i)
{
Widget boss = bossChildren[i];
Widget kill = killsChildren[i];
String bossName = boss.getText();
int kc = Integer.parseInt(kill.getText().replace(",", ""));
if (kc != getKc(bossName))
{
setKc(bossName, kc);
}
}
}
@Subscribe
public void onWidgetLoaded(WidgetLoaded widget)
{
// don't load kc if in an instance, e.g. if the player is in another player's POH
// and reading their boss log
if (widget.getGroupId() != KILL_LOGS_GROUP_ID || client.isInInstancedRegion())
{
return;
}
logKills = true;
}
@Override
public boolean onChatboxInput(ChatboxInput chatboxInput)
{
final String value = chatboxInput.getValue();
if (!config.killcount() || !value.startsWith("!kc ") && !value.startsWith("/!kc "))
{
return false;
}
int idx = value.indexOf(' ');
final String boss = longBossName(value.substring(idx + 1));
final int kc = getKc(boss);
if (kc <= 0)
{
return false;
}
final String playerName = client.getLocalPlayer().getName();
executor.execute(() ->
{
try
{
killCountClient.submit(playerName, boss, kc);
}
catch (Exception ex)
{
log.warn("unable to submit killcount", ex);
}
finally
{
chatboxInput.resume();
}
});
return true;
}
@Override
public boolean onPrivateMessageInput(PrivateMessageInput privateMessageInput)
{
final String message = privateMessageInput.getMessage();
if (!config.killcount() || !message.startsWith("!kc "))
{
return false;
}
int idx = message.indexOf(' ');
final String boss = longBossName(message.substring(idx + 1));
final int kc = getKc(boss);
if (kc <= 0)
{
return false;
}
final String playerName = client.getLocalPlayer().getName();
executor.execute(() ->
{
try
{
killCountClient.submit(playerName, boss, kc);
}
catch (Exception ex)
{
log.warn("unable to submit killcount", ex);
}
finally
{
privateMessageInput.resume();
}
});
return true;
}
private void killCountLookup(ChatMessageType type, SetMessage setMessage, String search)
{
final String player;
if (type.equals(ChatMessageType.PRIVATE_MESSAGE_SENT))
{
player = client.getLocalPlayer().getName();
}
else
{
player = sanitize(setMessage.getName());
}
search = longBossName(search);
final int kc;
try
{
kc = killCountClient.get(player, search);
}
catch (IOException ex)
{
log.debug("unable to lookup killcount", ex);
return;
}
String response = new ChatMessageBuilder()
.append(ChatColorType.HIGHLIGHT)
.append(search)
.append(ChatColorType.NORMAL)
.append(" kill count: ")
.append(ChatColorType.HIGHLIGHT)
.append(Integer.toString(kc))
.build();
log.debug("Setting response {}", response);
final MessageNode messageNode = setMessage.getMessageNode();
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
/**
* Looks up the item price and changes the original message to the
* response.
*
* @param messageNode The chat message containing the command.
* @param search The item given with the command.
*/
private void itemPriceLookup(MessageNode messageNode, String search)
{
SearchResult result;
try
{
result = itemManager.searchForItem(search);
}
catch (ExecutionException ex)
{
log.warn("Unable to search for item {}", search, ex);
return;
}
if (result != null && !result.getItems().isEmpty())
{
Item item = retrieveFromList(result.getItems(), search);
if (item == null)
{
log.debug("Unable to find item {} in result {}", search, result);
return;
}
int itemId = item.getId();
ItemPrice itemPrice = itemManager.getItemPrice(itemId);
final ChatMessageBuilder builder = new ChatMessageBuilder()
.append(ChatColorType.NORMAL)
.append("Price of ")
.append(ChatColorType.HIGHLIGHT)
.append(item.getName())
.append(ChatColorType.NORMAL)
.append(": GE average ")
.append(ChatColorType.HIGHLIGHT)
.append(StackFormatter.formatNumber(itemPrice.getPrice()));
ItemComposition itemComposition = itemManager.getItemComposition(itemId);
if (itemComposition != null)
{
int alchPrice = Math.round(itemComposition.getPrice() * HIGH_ALCHEMY_CONSTANT);
builder
.append(ChatColorType.NORMAL)
.append(" HA value ")
.append(ChatColorType.HIGHLIGHT)
.append(StackFormatter.formatNumber(alchPrice));
}
String response = builder.build();
log.debug("Setting response {}", response);
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
}
/**
* Looks up the player skill and changes the original message to the
* response.
*
* @param setMessage The chat message containing the command.
* @param search The skill given with the command.
*/
private void playerSkillLookup(SetMessage setMessage, String search)
{
search = SkillAbbreviations.getFullName(search);
final HiscoreSkill skill;
try
{
skill = HiscoreSkill.valueOf(search.toUpperCase());
}
catch (IllegalArgumentException i)
{
return;
}
final HiscoreLookup lookup = getCorrectLookupFor(setMessage);
try
{
final SingleHiscoreSkillResult result = hiscoreClient.lookup(lookup.getName(), skill, lookup.getEndpoint());
final Skill hiscoreSkill = result.getSkill();
final String response = new ChatMessageBuilder()
.append(ChatColorType.NORMAL)
.append("Level ")
.append(ChatColorType.HIGHLIGHT)
.append(skill.getName()).append(": ").append(String.valueOf(hiscoreSkill.getLevel()))
.append(ChatColorType.NORMAL)
.append(" Experience: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.format("%,d", hiscoreSkill.getExperience()))
.append(ChatColorType.NORMAL)
.append(" Rank: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.format("%,d", hiscoreSkill.getRank()))
.build();
log.debug("Setting response {}", response);
final MessageNode messageNode = setMessage.getMessageNode();
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
catch (IOException ex)
{
log.warn("unable to look up skill {} for {}", skill, search, ex);
}
}
/**
* Looks up the quantities of clues completed
* for the requested clue-level (no arg if requesting total)
* easy, medium, hard, elite, master
*/
private void playerClueLookup(SetMessage setMessage, String search)
{
final HiscoreLookup lookup = getCorrectLookupFor(setMessage);
try
{
final Skill hiscoreSkill;
final HiscoreResult result = hiscoreClient.lookup(lookup.getName(), lookup.getEndpoint());
String level = search.toLowerCase();
switch (level)
{
case "easy":
hiscoreSkill = result.getClueScrollEasy();
break;
case "medium":
hiscoreSkill = result.getClueScrollMedium();
break;
case "hard":
hiscoreSkill = result.getClueScrollHard();
break;
case "elite":
hiscoreSkill = result.getClueScrollElite();
break;
case "master":
hiscoreSkill = result.getClueScrollMaster();
break;
case "total":
hiscoreSkill = result.getClueScrollAll();
break;
default:
return;
}
int quantity = hiscoreSkill.getLevel();
int rank = hiscoreSkill.getRank();
if (quantity == -1)
{
return;
}
ChatMessageBuilder chatMessageBuilder = new ChatMessageBuilder()
.append("Clue scroll (" + level + ")").append(": ")
.append(ChatColorType.HIGHLIGHT)
.append(Integer.toString(quantity));
if (rank != -1)
{
chatMessageBuilder.append(ChatColorType.NORMAL)
.append(" Rank: ")
.append(ChatColorType.HIGHLIGHT)
.append(String.format("%,d", rank));
}
String response = chatMessageBuilder.build();
log.debug("Setting response {}", response);
final MessageNode messageNode = setMessage.getMessageNode();
messageNode.setRuneLiteFormatMessage(response);
chatMessageManager.update(messageNode);
client.refreshChat();
}
catch (IOException ex)
{
log.warn("error looking up clues", ex);
}
}
/**
* Gets correct lookup data for message
*
* @param setMessage chat message
* @return hiscore lookup data
*/
private HiscoreLookup getCorrectLookupFor(final SetMessage setMessage)
{
final String player;
final HiscoreEndpoint ironmanStatus;
if (setMessage.getType().equals(ChatMessageType.PRIVATE_MESSAGE_SENT))
{
player = client.getLocalPlayer().getName();
ironmanStatus = getHiscoreEndpointType();
}
else
{
player = sanitize(setMessage.getName());
if (player.equals(client.getLocalPlayer().getName()))
{
// Get ironman status for the local player
ironmanStatus = getHiscoreEndpointType();
}
else
{
// Get ironman status from their icon in chat
ironmanStatus = getHiscoreEndpointByName(setMessage.getName());
}
}
return new HiscoreLookup(player, ironmanStatus);
}
/**
* Compares the names of the items in the list with the original input.
* Returns the item if its name is equal to the original input or null
* if it can't find the item.
*
* @param items List of items.
* @param originalInput String with the original input.
* @return Item which has a name equal to the original input.
*/
private Item retrieveFromList(List<Item> items, String originalInput)
{
for (Item item : items)
{
if (item.getName().toLowerCase().equals(originalInput.toLowerCase()))
{
return item;
}
}
return null;
}
/**
* Cleans the ironman status icon from playername string if present and
* corrects spaces.
*
* @param lookup Playername to lookup.
* @return Cleaned playername.
*/
private static String sanitize(String lookup)
{
String cleaned = lookup.contains("<img") ? lookup.substring(lookup.lastIndexOf('>') + 1) : lookup;
return cleaned.replace('\u00A0', ' ');
}
/**
* Looks up the ironman status of the local player. Does NOT work on other players.
*
* @return hiscore endpoint
*/
private HiscoreEndpoint getHiscoreEndpointType()
{
return toEndPoint(client.getAccountType());
}
/**
* Returns the ironman status based on the symbol in the name of the player.
*
* @param name player name
* @return hiscore endpoint
*/
private static HiscoreEndpoint getHiscoreEndpointByName(final String name)
{
if (name.contains("<img=2>"))
{
return toEndPoint(AccountType.IRONMAN);
}
else if (name.contains("<img=3>"))
{
return toEndPoint(AccountType.ULTIMATE_IRONMAN);
}
else if (name.contains("<img=10>"))
{
return toEndPoint(AccountType.HARDCORE_IRONMAN);
}
else
{
return toEndPoint(AccountType.NORMAL);
}
}
/**
* Converts account type to hiscore endpoint
*
* @param accountType account type
* @return hiscore endpoint
*/
private static HiscoreEndpoint toEndPoint(final AccountType accountType)
{
switch (accountType)
{
case IRONMAN:
return HiscoreEndpoint.IRONMAN;
case ULTIMATE_IRONMAN:
return HiscoreEndpoint.ULTIMATE_IRONMAN;
case HARDCORE_IRONMAN:
return HiscoreEndpoint.HARDCORE_IRONMAN;
default:
return HiscoreEndpoint.NORMAL;
}
}
@Value
private static class HiscoreLookup
{
private final String name;
private final HiscoreEndpoint endpoint;
}
private static String longBossName(String boss)
{
switch (boss.toLowerCase())
{
case "corp":
return "Corporeal Beast";
case "jad":
return "TzTok-Jad";
case "kq":
return "Kalphite Queen";
case "chaos ele":
return "Chaos Elemental";
case "dusk":
case "dawn":
case "gargs":
return "Grotesque Guardians";
case "crazy arch":
return "Crazy Archaeologist";
case "deranged arch":
return "Deranged Archaeologist";
case "mole":
return "Giant Mole";
case "vetion":
return "Vet'ion";
case "vene":
return "Venenatis";
case "kbd":
return "King Black Dragon";
case "vork":
return "Vorkath";
case "sire":
return "Abyssal Sire";
case "smoke devil":
case "thermy":
return "Thermonuclear Smoke Devil";
case "cerb":
return "Cerberus";
case "zuk":
case "inferno":
return "TzKal-Zuk";
// gwd
case "sara":
case "saradomin":
case "zilyana":
case "zily":
return "Commander Zilyana";
case "zammy":
case "zamorak":
case "kril":
case "kril trutsaroth":
return "K'ril Tsutsaroth";
case "arma":
case "kree":
case "kreearra":
case "armadyl":
return "Kree'arra";
case "bando":
case "bandos":
case "graardor":
return "General Graardor";
// dks
case "supreme":
return "Dagannoth Supreme";
case "rex":
return "Dagannoth Rex";
case "prime":
return "Dagannoth Prime";
case "wt":
return "Wintertodt";
case "barrows":
return "Barrows Chests";
case "cox":
case "xeric":
case "chambers":
case "olm":
case "raids":
return "Chambers of Xeric";
case "tob":
case "theatre":
case "verzik":
case "verzik vitur":
case "raids 2":
return "Theatre of Blood";
default:
return boss;
}
}
}
}
| chat commands: add !cmb command
Co-authored-by: Magic fTail <[email protected]>
| runelite-client/src/main/java/net/runelite/client/plugins/chatcommands/ChatCommandsPlugin.java | chat commands: add !cmb command |
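Between the two copies of ChatCommandsPlugin in this row, the newer one (the first copy) adds the !cmb chat command, matching the commit subject: a CMB_COMMAND_STRING constant, an import of net.runelite.api.Experience, the combatLevelLookup method that builds the response from the player's hiscore stats via Experience.getCombatLevel, and this extra dispatch branch in onSetMessage:

    else if (config.lvl() && message.toLowerCase().equals(CMB_COMMAND_STRING))
    {
        log.debug("Running combat level lookup");
        executor.submit(() -> combatLevelLookup(setMessage.getType(), setMessage));
    }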
|
Java | mit | c40daf62b282a45b7cef4b7b5de3383f53d2bc9d | 0 | jontejj/malmo-improv | package com.malmoimprov;
import java.io.IOException;
import java.io.StringWriter;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.FilterConfig;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebFilter;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.vaadin.crudui.crud.impl.GridBasedCrudComponent;
import com.google.appengine.api.users.User;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.gson.JsonIOException;
import com.google.schemaorg.JsonLdSerializer;
import com.google.schemaorg.JsonLdSyntaxException;
import com.google.schemaorg.core.CoreFactory;
import com.google.schemaorg.core.EventReservation;
import com.google.schemaorg.core.ReservationStatusTypeEnum;
import com.googlecode.objectify.Key;
import com.googlecode.objectify.Objectify;
import com.googlecode.objectify.ObjectifyFilter;
import com.googlecode.objectify.ObjectifyService;
import com.googlecode.objectify.VoidWork;
import com.googlecode.objectify.util.Closeable;
import com.sendgrid.Content;
import com.sendgrid.Email;
import com.sendgrid.Mail;
import com.sendgrid.Method;
import com.sendgrid.Request;
import com.sendgrid.Response;
import com.sendgrid.SendGrid;
import com.vaadin.annotations.Theme;
import com.vaadin.annotations.VaadinServletConfiguration;
import com.vaadin.annotations.Viewport;
import com.vaadin.data.BeanValidationBinder;
import com.vaadin.data.Binder;
import com.vaadin.data.ValidationException;
import com.vaadin.data.converter.StringToIntegerConverter;
import com.vaadin.icons.VaadinIcons;
import com.vaadin.server.ExternalResource;
import com.vaadin.server.GAEVaadinServlet;
import com.vaadin.server.Responsive;
import com.vaadin.server.Sizeable.Unit;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.WebBrowser;
import com.vaadin.shared.ui.ContentMode;
import com.vaadin.ui.Button;
import com.vaadin.ui.Component;
import com.vaadin.ui.ComponentContainer;
import com.vaadin.ui.FormLayout;
import com.vaadin.ui.Grid;
import com.vaadin.ui.Grid.SelectionMode;
import com.vaadin.ui.Image;
import com.vaadin.ui.Label;
import com.vaadin.ui.Link;
import com.vaadin.ui.RadioButtonGroup;
import com.vaadin.ui.TextField;
import com.vaadin.ui.UI;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.renderers.ButtonRenderer;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
/**
* This UI is the application entry point. A UI may either represent a browser window
 * (or tab) or some part of an HTML page where a Vaadin application is embedded.
* <p>
* The UI is initialized using {@link #init(VaadinRequest)}. This method is intended to be
 * overridden to add components to the user interface and to initialize non-component functionality.
*/
@Theme("mytheme")
@Viewport("initial-scale=1.0, width=device-width")
public class MyUI extends UI
{
private static final long serialVersionUID = 1L;
private static final Logger LOG = LoggerFactory.getLogger(MyUI.class);
private static final String CONFIG_KEY_SENDGRID = "SENDGRID";
static final long EVENT_ID = 26;
private static final String CURRENCY = "SEK";
private static final String PHONENUMBER_TO_PAY_TO = "0764088570";
private static final long initialSeatCapacity = 22;
private static final BigDecimal ticketPrice = new BigDecimal("50");
private static final BigDecimal memberPricePercentage = new BigDecimal("0.80");
// private static final BigDecimal folkUniPricePercentage = new BigDecimal("0.60");
private static final String facebookEventUrl = "https://www.facebook.com/events/1332415923883641";
/**
* https://console.cloud.google.com/storage/browser?folder=&organizationId=&project=malmo-improv
*/
private static final String imageLink = "https://storage.googleapis.com/malmo-improv.appspot.com/events/TimeCapsule.png";
static final String eventName = "Time Capsule - Improv Show";
private static final com.google.schemaorg.core.Event event = CoreFactory.newTheaterEventBuilder() //
.addUrl(facebookEventUrl) //
.addName(eventName) //
.addImage(imageLink) //
.addOrganizer("Malmö Improvisatorium") //
.addStartDate("2021-10-13T18:00:00+01:00") //
.addDuration("PT1H30M") //
.addLocation(CoreFactory.newPlaceBuilder() //
.addName("MAF, scen 2") //
.addAddress(CoreFactory.newPostalAddressBuilder() //
.addStreetAddress("N. Skolgatan 10A") //
.addAddressLocality("Malmö") //
.addAddressRegion("SE-M") //
.addPostalCode("21153") //
.addAddressCountry("SE"))) //
.addProperty("phoneNumber", PHONENUMBER_TO_PAY_TO).build();
private static final Configuration cfg;
static
{
// SLF4JBridgeHandler.install();
cfg = new Configuration(Configuration.VERSION_2_3_25);
cfg.setClassForTemplateLoading(MyUI.class, "/email-templates/");
cfg.setDefaultEncoding("UTF-8");
cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
cfg.setLogTemplateExceptions(false);
}
private Binder<Reservation> binder;
@Override
protected void init(VaadinRequest vaadinRequest)
{
binder = new BeanValidationBinder<>(Reservation.class);
final VerticalLayout page = new VerticalLayout();
Image banner = new Image("", new ExternalResource(imageLink));
// banner.setSizeFull();
banner.setWidth(30, Unit.PERCENTAGE);
// banner.addStyleName("jonatan");
// banner.setWidth(800, Unit.PIXELS);
page.addComponent(banner);
SeatsRemaining seatsRemaining = loadSeatsRemaining(ObjectifyService.ofy());
if(vaadinRequest.getParameter("admin") != null)
{
UserService userService = UserServiceFactory.getUserService();
if(userService.isUserLoggedIn())
{
User currentUser = userService.getCurrentUser();
ImmutableSet<String> admins = ImmutableSet.of("[email protected]", "[email protected]", "[email protected]");
boolean isAdmin = admins.contains(currentUser.getEmail().toLowerCase());
if(isAdmin)
{
loggedInPage(page);
}
else
{
page.addComponent(new Label("Your user is not authorized to manage reservations"));
}
page.addComponent(new Link("Logout", new ExternalResource(userService.createLogoutURL("//"))));
}
else
{
page.addComponent(new Link("Login", new ExternalResource(userService.createLoginURL("/?admin"))));
}
}
else if(seatsRemaining.getSeatsRemaining() > 0)
{
page.addComponent(step1(page, seatsRemaining));
}
else
{
page.addComponent(fullyBooked());
}
Responsive.makeResponsive(page);// TODO doesn't work?
// setContent(banner);
setContent(page);
}
private void loggedInPage(final VerticalLayout page)
{
GridBasedCrudComponent<Reservation> reservations = new GridBasedCrudComponent<>(Reservation.class);
reservations.getCrudFormFactory().setUseBeanValidation(true);
reservations.setUpdateOperation(updatedReservation -> {
Objectify ofy = ObjectifyService.ofy();
Reservation oldReservation = ofy.load().entity(updatedReservation).now();
			List<Object> entitiesToUpdate = Lists.newArrayList(updatedReservation);
			if(!oldReservation.getCancelled() && updatedReservation.getCancelled())
			{
				SeatsRemaining seatsRemainingCheck = loadSeatsRemaining(ofy);
				seatsRemainingCheck.setSeatsRemaining(seatsRemainingCheck.getSeatsRemaining() + updatedReservation.getNrOfSeats());
				entitiesToUpdate.add(seatsRemainingCheck);
			}
			ofy.save().entities(entitiesToUpdate).now();
return updatedReservation;
});
reservations.setFindAllOperation(() -> ObjectifyService.ofy().load().type(Reservation.class).filter("eventId = ", EVENT_ID).list());
page.addComponent(reservations);
page.addComponent(new Button("Send 10 event reminders", (e) -> {
sendEventReminders(page, 10);
}));
page.addComponent(new Button("Send 10 event confirmations to people who paid", (e) -> {
sendEventConfirmations(page, 10);
}));
page.addComponent(new Button("Take attendance", (e) -> {
attendanceList(page);
}));
// page.addComponent(new Button("Migrate reservations", (e) -> {
// migrateReservations();
// }));
// page.addComponent(new Button("Create new event", (e) -> {
// page.addComponent(EventCreationPage.form());
// }));
}
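	// Admin view: lists every non-cancelled reservation with an "Attended" button per row
	// and shows the total amount that was already prepaid.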
private void attendanceList(VerticalLayout page)
{
List<Reservation> list = ObjectifyService.ofy().load().type(Reservation.class).filter("eventId = ", EVENT_ID).filter("cancelled =", false)
.list();
// Have some data
// Create a grid bound to the list
Grid<Reservation> grid = new Grid<>();
grid.setWidth(100, Unit.PERCENTAGE);
grid.setItems(list);
grid.addColumn(Reservation::getName).setCaption("Name");
grid.addColumn(Reservation::getPhone).setCaption("Phone");
grid.addColumn(Reservation::getNrOfSeats).setCaption("Seats");
grid.addColumn(MyUI::priceToPay).setCaption("Price");
grid.addColumn(Reservation::getPaid).setCaption("Paid");
grid.setSelectionMode(SelectionMode.NONE);
// grid.addColumn(Reservation::getAttended).setCaption("Attended");
grid.addColumn(person -> "Attended", new ButtonRenderer<Reservation>(clickEvent -> {
Reservation attended = clickEvent.getItem();
attended.setAttended(true);
ObjectifyService.ofy().save().entities(attended).now();
list.remove(attended);
grid.setItems(list);
}));
BigDecimal totalPrepaid = list.stream().filter(r -> r.getPaid()).map(MyUI::priceToPay).reduce(BigDecimal.ZERO, BigDecimal::add);
page.addComponent(grid);
page.addComponent(new Label("Total prepaid: " + totalPrepaid));
}
private void sendEventConfirmations(VerticalLayout page, int nrOfConfirmationsToSend)
{
Objectify ofy = ObjectifyService.ofy();
// List<Reservation> list = ofy.load().type(Reservation.class).list();
// list.forEach(r -> {
// // Test email
// if(r.getEmail().equals("[email protected]"))
// {
// sendConfirmationEmail(r);
// Label label = new Label(r.toString());
// label.setWidth(100, Unit.PERCENTAGE);
// page.addComponent(label);
// }
// });
// return;
List<Reservation> list = ofy.load().type(Reservation.class).filter("eventId = ", EVENT_ID).filter("cancelled =", false)
.filter("sentConfirmationAboutEvent =", false).filter("paid =", true).limit(nrOfConfirmationsToSend).list();
list.forEach(r -> {
sendConfirmationEmail(r);
r.setSentConfirmationAboutEvent(true);
ofy.save().entities(r).now();
});
String reservationsAsStr = list.toString();
Label label = new Label(reservationsAsStr);
label.setWidth(100, Unit.PERCENTAGE);
page.addComponent(label);
}
private void migrateReservations()
{
Objectify ofy = ObjectifyService.ofy();
List<Reservation> list = ofy.load().type(Reservation.class).list();
ofy.save().entities(list).now();
}
private void sendEventReminders(final VerticalLayout page, int nrOfRemindersToSend)
{
Objectify ofy = ObjectifyService.ofy();
/*
* List<Reservation> list = ofy.load().type(Reservation.class).list();
* list.forEach(r ->
* {
* //Test email
* if(r.getEmail().equals("[email protected]"))
* {
* sendReminderEmail(r);
* Label label = new Label(r.toString());
* label.setWidth(100, Unit.PERCENTAGE);
* page.addComponent(label);
* }
* }
*/
List<Reservation> list = ofy.load().type(Reservation.class).filter("eventId = ", EVENT_ID).filter("cancelled =", false)
.filter("sentReminderAboutEvent =", false).limit(nrOfRemindersToSend).list();
list.forEach(r -> {
sendReminderEmail(r);
r.setSentReminderAboutEvent(true);
ofy.save().entities(r).now();
});
String reservationsAsStr = list.toString();
Label label = new Label(reservationsAsStr);
label.setWidth(100, Unit.PERCENTAGE);
page.addComponent(label);
}
private SeatsRemaining loadSeatsRemaining(Objectify ofy)
{
return ofy.load().key(Key.create(SeatsRemaining.class, "" + EVENT_ID)).now();
}
private static String loadConfig(String key)
{
Config configEntry = ObjectifyService.ofy().load().key(Key.create(Config.class, key)).now();
if(configEntry == null)
throw new IllegalStateException("No config for key: " + key);
return configEntry.getValue();
}
private VerticalLayout fullyBooked()
{
final VerticalLayout fullyBooked = new VerticalLayout();
Label text = new Label(eventName + " sold out. Better luck next time!");
text.addStyleName("small");
text.setWidth(100, Unit.PERCENTAGE);
fullyBooked.addComponent(text);
// Image banner = new Image("", new ClassResource("/contagious-soldout.jpg"));
// banner.addStyleName("jonatan");
// banner.setWidth(100, Unit.PERCENTAGE);
Link facebookLink = new Link("Follow us on facebook for future events!", new ExternalResource("https://www.facebook.com/improvisatorium/"));
facebookLink.setIcon(VaadinIcons.FACEBOOK_SQUARE);
facebookLink.addStyleName("small");
facebookLink.setTargetName("_blank");
// fullyBooked.addComponents(text, banner, facebookLink);
fullyBooked.addComponents(text, facebookLink);
return fullyBooked;
}
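	// Step 1 of 2: the public reservation form (name, email, phone, discount and number of seats).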
private Component step1(final VerticalLayout page, SeatsRemaining seatsRemaining)
{
final VerticalLayout step1Container = new VerticalLayout();
String defaultDiscountType = "Normal";
final Label instructions = new Label("Step 1/2: Reserve your seats for <b>" + eventName + "</b> by filling in your details here:",
ContentMode.HTML);
instructions.setWidth(100, Unit.PERCENTAGE);
int defaultNrOfSeats = 1;
final Label price = new Label(priceDescription(defaultNrOfSeats, defaultDiscountType));
final FormLayout step1 = new FormLayout();
final TextField nrOfSeats = new TextField();
nrOfSeats.setValue("" + defaultNrOfSeats);
nrOfSeats.setCaptionAsHtml(true);
StringBuilder caption = new StringBuilder("Nr of seats to reserve <br/>(max 5)");
if(seatsRemaining.getSeatsRemaining() <= 10)
{
caption.append(" (" + seatsRemaining.getSeatsRemaining() + " remaining)");
}
nrOfSeats.setCaption(caption.toString());
nrOfSeats.setRequiredIndicatorVisible(true);
binder.forField(nrOfSeats).withConverter(new StringToIntegerConverter("Invalid nr of seats")).bind("nrOfSeats");
RadioButtonGroup<String> discounts = new RadioButtonGroup<>("Discounts");
// "Folk Universitetet"
discounts.setItems("Normal", "MAF-member", "Student");
discounts.setSelectedItem(defaultDiscountType);
binder.forField(discounts).bind("discount");
Button reserveButton = new Button("Reserve seats");
reserveButton.setEnabled(false);
binder.addValueChangeListener((e) -> {
reserveButton.setEnabled(binder.isValid());
try
{
price.setValue(priceDescription(Long.parseLong(nrOfSeats.getValue()), discounts.getSelectedItem().get()));
price.markAsDirty();
}
catch(NumberFormatException invalid)
{
}
});
reserveButton.addClickListener(e -> {
reserveButtonClicked(page, step1Container);
});
final TextField name = new TextField();
name.setCaption("Name:");
name.setRequiredIndicatorVisible(true);
binder.forField(name).bind("name");
final TextField email = new TextField();
email.setCaption("Email:");
email.setRequiredIndicatorVisible(true);
binder.forField(email).bind("email");
final TextField phone = new TextField();
phone.setCaption("Phone:");
phone.setRequiredIndicatorVisible(true);
binder.forField(phone).bind("phone");
step1.addComponents(name, email, phone, discounts, nrOfSeats, reserveButton);
step1Container.addComponents(instructions, step1, price);
return step1Container;
}
private String priceDescription(long nrOfSeats, String defaultDiscountType)
{
return "Total ticket price: " + priceToPay(nrOfSeats, defaultDiscountType) + " " + CURRENCY;
}
private void reserveButtonClicked(final ComponentContainer page, final Component step1)
{
Reservation reservation = new Reservation();
try
{
binder.writeBean(reservation);
}
catch(ValidationException e1)
{
throw new RuntimeException("server side validation failed after client validation passed, forgot to add a UI component?", e1);
}
WebBrowser webBrowser = UI.getCurrent().getPage().getWebBrowser();
reservation.setCreationTime(webBrowser.getCurrentDate());
reservation.setEventId(EVENT_ID);
Objectify ofy = ObjectifyService.ofy();
SeatsRemaining seatsRemainingCheck = loadSeatsRemaining(ofy);
if(seatsRemainingCheck.getSeatsRemaining() < reservation.getNrOfSeats())
throw new RuntimeException(
"Got booked while you were entering your data. Only " + seatsRemainingCheck.getSeatsRemaining() + " seats are now remaining.");
seatsRemainingCheck.setSeatsRemaining(seatsRemainingCheck.getSeatsRemaining() - reservation.getNrOfSeats());
Map<Key<Object>, Object> savedData = ofy.save().entities(seatsRemainingCheck, reservation).now();
reservation = (Reservation) savedData.get(Key.create(reservation));
sendStep1ConfirmationEmail(reservation);
final VerticalLayout step2 = new VerticalLayout();
BigDecimal priceToPay = priceToPay(reservation.getNrOfSeats(), reservation.getDiscount());
Label instructions2 = new Label("<b>Step 2/2</b>: Swish " + priceToPay.longValue() + " " + CURRENCY + " to " + PHONENUMBER_TO_PAY_TO
+ " to finalize your reservation. <br/><b>Note:</b>Your tickets are reserved for 3 days. Please remember to buy them via swish to finish the booking.",
ContentMode.HTML);
step2.addComponents(new Label(
"Thanks " + reservation.getName() + ", your reservation of " + reservation.getNrOfSeats()
+ " seat(s) is noted! Your reservation number is " + reservation.id + ".<br/>An email confirmation has been sent to "
+ reservation.getEmail() + ". <br/><br/>",
ContentMode.HTML), instructions2);
Link facebookLink = new Link("Remember to also sign up for the event on facebook!", new ExternalResource(facebookEventUrl));
facebookLink.setIcon(VaadinIcons.FACEBOOK_SQUARE);
facebookLink.setTargetName("_blank");
step2.addComponent(facebookLink);
page.removeComponent(step1);
page.addComponent(step2);
}
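	// Builds a schema.org EventReservation for the booking, serializes it to JSON-LD and exposes
	// both the parsed fields and the raw JSON-LD string ("jsonLd") to the FreeMarker email templates.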
static Map<String, Object> reservationInformation(Reservation reservation, BigDecimal priceToPay)
{
ReservationStatusTypeEnum status = ReservationStatusTypeEnum.RESERVATION_PENDING;
if(reservation.getPaid())
{
status = ReservationStatusTypeEnum.RESERVATION_CONFIRMED;
}
else if(reservation.getCancelled())
{
status = ReservationStatusTypeEnum.RESERVATION_CANCELLED;
}
/*
* TODO:
* "ticketToken": "qrCode:AB34",
* "ticketNumber": "abc123",
* "numSeats": "1"
*/
// TODO: set paid for the template somehow. status is a bit odd to use
EventReservation eventReservation = CoreFactory.newEventReservationBuilder()
// .addReservationId("" + reservation.id) //
.addProperty("reservationNumber", "" + reservation.id) //
.addReservationStatus(status) //
.addUnderName(CoreFactory.newPersonBuilder() //
.addName(reservation.getName()) //
.addEmail(reservation.getEmail()) //
.addTelephone(reservation.getPhone())) //
.addProperty("numSeats", "" + reservation.getNrOfSeats()) //
.addDescription(reservation.getNrOfSeats() + " seats") //
.addReservationFor(event) //
.addTotalPrice(CoreFactory.newPriceSpecificationBuilder() //
.addPriceCurrency(CURRENCY) //
.addPrice(priceToPay.toString()))
.build();
String asJsonLd = getAsJson(eventReservation);
HashMap<String, Object> map = new HashMap<>(new Gson().fromJson(asJsonLd, Map.class));
map.put("jsonLd", asJsonLd);
return map;
}
private static BigDecimal priceToPay(long nrOfSeats, String discount)
{
return ticketPrice.multiply(new BigDecimal(nrOfSeats)).multiply(determinePriceModifier(discount)).setScale(0, RoundingMode.HALF_UP);
}
private static BigDecimal priceToPay(Reservation reservation)
{
return ticketPrice.multiply(new BigDecimal(reservation.getNrOfSeats())).multiply(determinePriceModifier(reservation.getDiscount()))
.setScale(0, RoundingMode.HALF_UP);
}
private void sendStep1ConfirmationEmail(Reservation reservation) throws EmailException
{
		sendEmail(reservation, reservation.getName() + ", your reservation is partially completed", "event-reservation-confirmation-email.ftlh",
"reservations");
}
private void sendReminderEmail(Reservation reservation) throws EmailException
{
sendEmail(reservation, "See you soon @ " + eventName + "!", "reservation-reminder.ftlh", "reminder");
}
private void sendConfirmationEmail(Reservation reservation)
{
sendEmail(reservation, "Payment confirmed for " + eventName, "reservation-confirmation.ftlh", "reminder");
}
private void sendEmail(Reservation reservation, String subject, String template, String category)
{
Email from = new Email("[email protected]", "Malmö Improvisatorium Reservations");
Email to = new Email(reservation.getEmail(), reservation.getName());
String emailText = generateTemplateWithData(template, reservation);
Content content = new Content("text/html", emailText);
Mail mail = new Mail(from, subject, to, content);
String apiKey = loadConfig(CONFIG_KEY_SENDGRID);
SendGrid sg = new SendGrid(apiKey);
Request request = new Request();
try
{
mail.addCategory(category);
request.setMethod(Method.POST);
request.setEndpoint("mail/send");
request.setBody(mail.build());
Response response = sg.api(request);
LOG.info("Returned status code: {}", response.getStatusCode());
LOG.info("Body: {}", response.getBody());
LOG.info("Headers: {}", response.getHeaders());
}
catch(IOException ex)
{
throw new EmailException("Failed to send reservation email to " + reservation.getEmail(), ex);
}
}
static String generateTemplateWithData(String templateName, Reservation reservation) throws EmailException
{
BigDecimal priceToPay = priceToPay(reservation.getNrOfSeats(), reservation.getDiscount());
Map<String, Object> map = reservationInformation(reservation, priceToPay);
try
{
Template temp = cfg.getTemplate(templateName);
StringWriter out = new StringWriter();
temp.process(map, out);
return out.toString();
}
catch(IOException | TemplateException ex)
{
throw new EmailException("Failed to generate reservation email", ex);
}
}
private static final JsonLdSerializer serializer = new JsonLdSerializer(true /* setPrettyPrinting */);
public static String getAsJson(EventReservation reservation)
{
try
{
return serializer.serialize(reservation);
}
catch(JsonLdSyntaxException | JsonIOException e)
{
throw new RuntimeException("Failed to generate schema.org string", e);
}
}
private static BigDecimal determinePriceModifier(String discount)
{
switch(discount)
{
// case "Folk Universitetet":
// return folkUniPricePercentage;
case "MAF-member":
case "Student":
return memberPricePercentage;
case "Normal":
default:
return BigDecimal.ONE;
}
}
@SuppressWarnings("deprecation")
@WebServlet(urlPatterns = "/*", name = "MyUIServlet", asyncSupported = true)
@VaadinServletConfiguration(ui = MyUI.class, productionMode = true)
public static class MyUIServlet extends GAEVaadinServlet
{
private static final long serialVersionUID = 1L;
@Override
public void init(ServletConfig servletConfig) throws ServletException
{
super.init(servletConfig);
// ObjectifyService.init();
// ObjectifyService.init(new
// ObjectifyFactory(DatastoreOptions.newBuilder().setCredentials(GoogleCredentials.getApplicationDefault()).build().getService()));
// ObjectifyService.register(Reservation.class);
// ObjectifyService.register(SeatsRemaining.class);
// ObjectifyService.register(Config.class);
}
@Override
protected void service(HttpServletRequest unwrappedRequest, HttpServletResponse unwrappedResponse) throws ServletException, IOException
{
// System.out.println("Env:" + System.getenv());
// System.out.println("Properties:" + System.getProperties());
super.service(unwrappedRequest, unwrappedResponse);
}
@Override
public void destroy()
{
super.destroy();
// MemcacheServiceFactory.getMemcacheService().clearAll();
}
}
@WebFilter(urlPatterns = "/*", asyncSupported = true)
public static class MyObjectifyFilter extends ObjectifyFilter
{
@Override
public void init(FilterConfig filterConfig) throws ServletException
{
ObjectifyService.register(Reservation.class);
ObjectifyService.register(SeatsRemaining.class);
ObjectifyService.register(Config.class);
try(Closeable closeable = ObjectifyService.begin())
{
ObjectifyService.ofy().transactNew(new VoidWork(){
@Override
public void vrun()
{
LOG.info("Configuring seats");
Objectify ofy = ObjectifyService.ofy();
SeatsRemaining now = ofy.load().key(Key.create(SeatsRemaining.class, "" + EVENT_ID)).now();
if(now == null)
{
ofy.save().entities(new SeatsRemaining().setEventId("" + EVENT_ID).setSeatsRemaining(initialSeatCapacity)).now();
}
String sendgridKey = System.getProperty("config." + CONFIG_KEY_SENDGRID);
if(sendgridKey != null)
{
Config alreadyExists = ofy.load().key(Key.create(Config.class, CONFIG_KEY_SENDGRID)).now();
if(alreadyExists == null)
{
Config config = new Config();
config.setKey(CONFIG_KEY_SENDGRID);
config.setValue(sendgridKey);
ofy.save().entities(config).now();
}
}
}
});
}
}
}
}
| src/main/java/com/malmoimprov/MyUI.java | package com.malmoimprov;
import java.io.IOException;
import java.io.StringWriter;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.FilterConfig;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebFilter;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.vaadin.crudui.crud.impl.GridBasedCrudComponent;
import com.google.appengine.api.users.User;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import com.google.gson.JsonIOException;
import com.google.schemaorg.JsonLdSerializer;
import com.google.schemaorg.JsonLdSyntaxException;
import com.google.schemaorg.core.CoreFactory;
import com.google.schemaorg.core.EventReservation;
import com.google.schemaorg.core.ReservationStatusTypeEnum;
import com.googlecode.objectify.Key;
import com.googlecode.objectify.Objectify;
import com.googlecode.objectify.ObjectifyFilter;
import com.googlecode.objectify.ObjectifyService;
import com.googlecode.objectify.VoidWork;
import com.googlecode.objectify.util.Closeable;
import com.sendgrid.Content;
import com.sendgrid.Email;
import com.sendgrid.Mail;
import com.sendgrid.Method;
import com.sendgrid.Request;
import com.sendgrid.Response;
import com.sendgrid.SendGrid;
import com.vaadin.annotations.Theme;
import com.vaadin.annotations.VaadinServletConfiguration;
import com.vaadin.annotations.Viewport;
import com.vaadin.data.BeanValidationBinder;
import com.vaadin.data.Binder;
import com.vaadin.data.ValidationException;
import com.vaadin.data.converter.StringToIntegerConverter;
import com.vaadin.icons.VaadinIcons;
import com.vaadin.server.ExternalResource;
import com.vaadin.server.GAEVaadinServlet;
import com.vaadin.server.Responsive;
import com.vaadin.server.Sizeable.Unit;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.WebBrowser;
import com.vaadin.shared.ui.ContentMode;
import com.vaadin.ui.Button;
import com.vaadin.ui.Component;
import com.vaadin.ui.ComponentContainer;
import com.vaadin.ui.FormLayout;
import com.vaadin.ui.Grid;
import com.vaadin.ui.Grid.SelectionMode;
import com.vaadin.ui.Image;
import com.vaadin.ui.Label;
import com.vaadin.ui.Link;
import com.vaadin.ui.RadioButtonGroup;
import com.vaadin.ui.TextField;
import com.vaadin.ui.UI;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.renderers.ButtonRenderer;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
/**
* This UI is the application entry point. A UI may either represent a browser window
 * (or tab) or some part of an HTML page where a Vaadin application is embedded.
* <p>
* The UI is initialized using {@link #init(VaadinRequest)}. This method is intended to be
 * overridden to add components to the user interface and to initialize non-component functionality.
*/
@Theme("mytheme")
@Viewport("initial-scale=1.0, width=device-width")
public class MyUI extends UI
{
private static final long serialVersionUID = 1L;
private static final Logger LOG = LoggerFactory.getLogger(MyUI.class);
private static final String CONFIG_KEY_SENDGRID = "SENDGRID";
static final long EVENT_ID = 25;
private static final String CURRENCY = "SEK";
private static final String PHONENUMBER_TO_PAY_TO = "0764088570";
private static final long initialSeatCapacity = 35;
private static final BigDecimal ticketPrice = new BigDecimal("50");
private static final BigDecimal memberPricePercentage = new BigDecimal("0.80");
// private static final BigDecimal folkUniPricePercentage = new BigDecimal("0.60");
private static final String facebookEventUrl = "https://www.facebook.com/events/354541016384439";
/**
* https://console.cloud.google.com/storage/browser?folder=&organizationId=&project=malmo-improv
*/
private static final String imageLink = "https://storage.googleapis.com/malmo-improv.appspot.com/events/Gladiator.png";
static final String eventName = "Gladiator - Improv Show";
private static final com.google.schemaorg.core.Event event = CoreFactory.newTheaterEventBuilder() //
.addUrl(facebookEventUrl) //
.addName(eventName) //
.addImage(imageLink) //
.addOrganizer("Malmö Improvisatorium") //
.addStartDate("2021-10-01T19:00:00+02:00") //
.addDuration("PT1H30M") //
.addLocation(CoreFactory.newPlaceBuilder() //
.addName("MAF, scen 1") //
.addAddress(CoreFactory.newPostalAddressBuilder() //
.addStreetAddress("N. Skolgatan 10A") //
.addAddressLocality("Malmö") //
.addAddressRegion("SE-M") //
.addPostalCode("21153") //
.addAddressCountry("SE"))) //
.addProperty("phoneNumber", PHONENUMBER_TO_PAY_TO).build();
private static final Configuration cfg;
static
{
// SLF4JBridgeHandler.install();
cfg = new Configuration(Configuration.VERSION_2_3_25);
cfg.setClassForTemplateLoading(MyUI.class, "/email-templates/");
cfg.setDefaultEncoding("UTF-8");
cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
cfg.setLogTemplateExceptions(false);
}
private Binder<Reservation> binder;
@Override
protected void init(VaadinRequest vaadinRequest)
{
binder = new BeanValidationBinder<>(Reservation.class);
final VerticalLayout page = new VerticalLayout();
Image banner = new Image("", new ExternalResource(imageLink));
// banner.setSizeFull();
banner.setWidth(30, Unit.PERCENTAGE);
// banner.addStyleName("jonatan");
// banner.setWidth(800, Unit.PIXELS);
page.addComponent(banner);
SeatsRemaining seatsRemaining = loadSeatsRemaining(ObjectifyService.ofy());
if(vaadinRequest.getParameter("admin") != null)
{
UserService userService = UserServiceFactory.getUserService();
if(userService.isUserLoggedIn())
{
User currentUser = userService.getCurrentUser();
ImmutableSet<String> admins = ImmutableSet.of("[email protected]", "[email protected]", "[email protected]");
boolean isAdmin = admins.contains(currentUser.getEmail().toLowerCase());
if(isAdmin)
{
loggedInPage(page);
}
else
{
page.addComponent(new Label("Your user is not authorized to manage reservations"));
}
page.addComponent(new Link("Logout", new ExternalResource(userService.createLogoutURL("//"))));
}
else
{
page.addComponent(new Link("Login", new ExternalResource(userService.createLoginURL("/?admin"))));
}
}
else if(seatsRemaining.getSeatsRemaining() > 0)
{
page.addComponent(step1(page, seatsRemaining));
}
else
{
page.addComponent(fullyBooked());
}
Responsive.makeResponsive(page);// TODO doesn't work?
// setContent(banner);
setContent(page);
}
private void loggedInPage(final VerticalLayout page)
{
GridBasedCrudComponent<Reservation> reservations = new GridBasedCrudComponent<>(Reservation.class);
reservations.getCrudFormFactory().setUseBeanValidation(true);
reservations.setUpdateOperation(updatedReservation -> {
Objectify ofy = ObjectifyService.ofy();
Reservation oldReservation = ofy.load().entity(updatedReservation).now();
			List<Object> entitiesToUpdate = Lists.newArrayList(updatedReservation);
			if(!oldReservation.getCancelled() && updatedReservation.getCancelled())
			{
				SeatsRemaining seatsRemainingCheck = loadSeatsRemaining(ofy);
				seatsRemainingCheck.setSeatsRemaining(seatsRemainingCheck.getSeatsRemaining() + updatedReservation.getNrOfSeats());
				entitiesToUpdate.add(seatsRemainingCheck);
			}
			ofy.save().entities(entitiesToUpdate).now();
return updatedReservation;
});
reservations.setFindAllOperation(() -> ObjectifyService.ofy().load().type(Reservation.class).filter("eventId = ", EVENT_ID).list());
page.addComponent(reservations);
page.addComponent(new Button("Send 10 event reminders", (e) -> {
sendEventReminders(page, 10);
}));
page.addComponent(new Button("Send 10 event confirmations to people who paid", (e) -> {
sendEventConfirmations(page, 10);
}));
page.addComponent(new Button("Take attendance", (e) -> {
attendanceList(page);
}));
// page.addComponent(new Button("Migrate reservations", (e) -> {
// migrateReservations();
// }));
// page.addComponent(new Button("Create new event", (e) -> {
// page.addComponent(EventCreationPage.form());
// }));
}
private void attendanceList(VerticalLayout page)
{
List<Reservation> list = ObjectifyService.ofy().load().type(Reservation.class).filter("eventId = ", EVENT_ID).filter("cancelled =", false)
.list();
// Have some data
// Create a grid bound to the list
Grid<Reservation> grid = new Grid<>();
grid.setWidth(100, Unit.PERCENTAGE);
grid.setItems(list);
grid.addColumn(Reservation::getName).setCaption("Name");
grid.addColumn(Reservation::getPhone).setCaption("Phone");
grid.addColumn(Reservation::getNrOfSeats).setCaption("Seats");
grid.addColumn(MyUI::priceToPay).setCaption("Price");
grid.addColumn(Reservation::getPaid).setCaption("Paid");
grid.setSelectionMode(SelectionMode.NONE);
// grid.addColumn(Reservation::getAttended).setCaption("Attended");
grid.addColumn(person -> "Attended", new ButtonRenderer<Reservation>(clickEvent -> {
Reservation attended = clickEvent.getItem();
attended.setAttended(true);
ObjectifyService.ofy().save().entities(attended).now();
list.remove(attended);
grid.setItems(list);
}));
BigDecimal totalPrepaid = list.stream().filter(r -> r.getPaid()).map(MyUI::priceToPay).reduce(BigDecimal.ZERO, BigDecimal::add);
page.addComponent(grid);
page.addComponent(new Label("Total prepaid: " + totalPrepaid));
}
private void sendEventConfirmations(VerticalLayout page, int nrOfConfirmationsToSend)
{
Objectify ofy = ObjectifyService.ofy();
// List<Reservation> list = ofy.load().type(Reservation.class).list();
// list.forEach(r -> {
// // Test email
// if(r.getEmail().equals("[email protected]"))
// {
// sendConfirmationEmail(r);
// Label label = new Label(r.toString());
// label.setWidth(100, Unit.PERCENTAGE);
// page.addComponent(label);
// }
// });
// return;
List<Reservation> list = ofy.load().type(Reservation.class).filter("eventId = ", EVENT_ID).filter("cancelled =", false)
.filter("sentConfirmationAboutEvent =", false).filter("paid =", true).limit(nrOfConfirmationsToSend).list();
list.forEach(r -> {
sendConfirmationEmail(r);
r.setSentConfirmationAboutEvent(true);
ofy.save().entities(r).now();
});
String reservationsAsStr = list.toString();
Label label = new Label(reservationsAsStr);
label.setWidth(100, Unit.PERCENTAGE);
page.addComponent(label);
}
private void migrateReservations()
{
Objectify ofy = ObjectifyService.ofy();
List<Reservation> list = ofy.load().type(Reservation.class).list();
ofy.save().entities(list).now();
}
private void sendEventReminders(final VerticalLayout page, int nrOfRemindersToSend)
{
Objectify ofy = ObjectifyService.ofy();
/*
* List<Reservation> list = ofy.load().type(Reservation.class).list();
* list.forEach(r ->
* {
* //Test email
* if(r.getEmail().equals("[email protected]"))
* {
* sendReminderEmail(r);
* Label label = new Label(r.toString());
* label.setWidth(100, Unit.PERCENTAGE);
* page.addComponent(label);
* }
* }
*/
List<Reservation> list = ofy.load().type(Reservation.class).filter("eventId = ", EVENT_ID).filter("cancelled =", false)
.filter("sentReminderAboutEvent =", false).limit(nrOfRemindersToSend).list();
list.forEach(r -> {
sendReminderEmail(r);
r.setSentReminderAboutEvent(true);
ofy.save().entities(r).now();
});
String reservationsAsStr = list.toString();
Label label = new Label(reservationsAsStr);
label.setWidth(100, Unit.PERCENTAGE);
page.addComponent(label);
}
private SeatsRemaining loadSeatsRemaining(Objectify ofy)
{
return ofy.load().key(Key.create(SeatsRemaining.class, "" + EVENT_ID)).now();
}
private static String loadConfig(String key)
{
Config configEntry = ObjectifyService.ofy().load().key(Key.create(Config.class, key)).now();
if(configEntry == null)
throw new IllegalStateException("No config for key: " + key);
return configEntry.getValue();
}
private VerticalLayout fullyBooked()
{
final VerticalLayout fullyBooked = new VerticalLayout();
Label text = new Label(eventName + " sold out. Better luck next time!");
text.addStyleName("small");
text.setWidth(100, Unit.PERCENTAGE);
fullyBooked.addComponent(text);
// Image banner = new Image("", new ClassResource("/contagious-soldout.jpg"));
// banner.addStyleName("jonatan");
// banner.setWidth(100, Unit.PERCENTAGE);
Link facebookLink = new Link("Follow us on facebook for future events!", new ExternalResource("https://www.facebook.com/improvisatorium/"));
facebookLink.setIcon(VaadinIcons.FACEBOOK_SQUARE);
facebookLink.addStyleName("small");
facebookLink.setTargetName("_blank");
// fullyBooked.addComponents(text, banner, facebookLink);
fullyBooked.addComponents(text, facebookLink);
return fullyBooked;
}
private Component step1(final VerticalLayout page, SeatsRemaining seatsRemaining)
{
final VerticalLayout step1Container = new VerticalLayout();
String defaultDiscountType = "Normal";
final Label instructions = new Label("Step 1/2: Reserve your seats for <b>" + eventName + "</b> by filling in your details here:",
ContentMode.HTML);
instructions.setWidth(100, Unit.PERCENTAGE);
int defaultNrOfSeats = 1;
final Label price = new Label(priceDescription(defaultNrOfSeats, defaultDiscountType));
final FormLayout step1 = new FormLayout();
final TextField nrOfSeats = new TextField();
nrOfSeats.setValue("" + defaultNrOfSeats);
nrOfSeats.setCaptionAsHtml(true);
StringBuilder caption = new StringBuilder("Nr of seats to reserve <br/>(max 5)");
if(seatsRemaining.getSeatsRemaining() <= 10)
{
caption.append(" (" + seatsRemaining.getSeatsRemaining() + " remaining)");
}
nrOfSeats.setCaption(caption.toString());
nrOfSeats.setRequiredIndicatorVisible(true);
binder.forField(nrOfSeats).withConverter(new StringToIntegerConverter("Invalid nr of seats")).bind("nrOfSeats");
RadioButtonGroup<String> discounts = new RadioButtonGroup<>("Discounts");
// "Folk Universitetet"
discounts.setItems("Normal", "MAF-member", "Student");
discounts.setSelectedItem(defaultDiscountType);
binder.forField(discounts).bind("discount");
Button reserveButton = new Button("Reserve seats");
reserveButton.setEnabled(false);
binder.addValueChangeListener((e) -> {
reserveButton.setEnabled(binder.isValid());
try
{
price.setValue(priceDescription(Long.parseLong(nrOfSeats.getValue()), discounts.getSelectedItem().get()));
price.markAsDirty();
}
catch(NumberFormatException invalid)
{
}
});
reserveButton.addClickListener(e -> {
reserveButtonClicked(page, step1Container);
});
final TextField name = new TextField();
name.setCaption("Name:");
name.setRequiredIndicatorVisible(true);
binder.forField(name).bind("name");
final TextField email = new TextField();
email.setCaption("Email:");
email.setRequiredIndicatorVisible(true);
binder.forField(email).bind("email");
final TextField phone = new TextField();
phone.setCaption("Phone:");
phone.setRequiredIndicatorVisible(true);
binder.forField(phone).bind("phone");
step1.addComponents(name, email, phone, discounts, nrOfSeats, reserveButton);
step1Container.addComponents(instructions, step1, price);
return step1Container;
}
private String priceDescription(long nrOfSeats, String defaultDiscountType)
{
return "Total ticket price: " + priceToPay(nrOfSeats, defaultDiscountType) + " " + CURRENCY;
}
private void reserveButtonClicked(final ComponentContainer page, final Component step1)
{
Reservation reservation = new Reservation();
try
{
binder.writeBean(reservation);
}
catch(ValidationException e1)
{
throw new RuntimeException("server side validation failed after client validation passed, forgot to add a UI component?", e1);
}
WebBrowser webBrowser = UI.getCurrent().getPage().getWebBrowser();
reservation.setCreationTime(webBrowser.getCurrentDate());
reservation.setEventId(EVENT_ID);
Objectify ofy = ObjectifyService.ofy();
SeatsRemaining seatsRemainingCheck = loadSeatsRemaining(ofy);
if(seatsRemainingCheck.getSeatsRemaining() < reservation.getNrOfSeats())
throw new RuntimeException(
"Got booked while you were entering your data. Only " + seatsRemainingCheck.getSeatsRemaining() + " seats are now remaining.");
seatsRemainingCheck.setSeatsRemaining(seatsRemainingCheck.getSeatsRemaining() - reservation.getNrOfSeats());
Map<Key<Object>, Object> savedData = ofy.save().entities(seatsRemainingCheck, reservation).now();
reservation = (Reservation) savedData.get(Key.create(reservation));
sendStep1ConfirmationEmail(reservation);
final VerticalLayout step2 = new VerticalLayout();
BigDecimal priceToPay = priceToPay(reservation.getNrOfSeats(), reservation.getDiscount());
Label instructions2 = new Label("<b>Step 2/2</b>: Swish " + priceToPay.longValue() + " " + CURRENCY + " to " + PHONENUMBER_TO_PAY_TO
+ " to finalize your reservation. <br/><b>Note:</b>Your tickets are reserved for 3 days. Please remember to buy them via swish to finish the booking.",
ContentMode.HTML);
step2.addComponents(new Label(
"Thanks " + reservation.getName() + ", your reservation of " + reservation.getNrOfSeats()
+ " seat(s) is noted! Your reservation number is " + reservation.id + ".<br/>An email confirmation has been sent to "
+ reservation.getEmail() + ". <br/><br/>",
ContentMode.HTML), instructions2);
Link facebookLink = new Link("Remember to also sign up for the event on facebook!", new ExternalResource(facebookEventUrl));
facebookLink.setIcon(VaadinIcons.FACEBOOK_SQUARE);
facebookLink.setTargetName("_blank");
step2.addComponent(facebookLink);
page.removeComponent(step1);
page.addComponent(step2);
}
static Map<String, Object> reservationInformation(Reservation reservation, BigDecimal priceToPay)
{
ReservationStatusTypeEnum status = ReservationStatusTypeEnum.RESERVATION_PENDING;
if(reservation.getPaid())
{
status = ReservationStatusTypeEnum.RESERVATION_CONFIRMED;
}
else if(reservation.getCancelled())
{
status = ReservationStatusTypeEnum.RESERVATION_CANCELLED;
}
/*
* TODO:
* "ticketToken": "qrCode:AB34",
* "ticketNumber": "abc123",
* "numSeats": "1"
*/
// TODO: set paid for the template somehow. status is a bit odd to use
EventReservation eventReservation = CoreFactory.newEventReservationBuilder()
// .addReservationId("" + reservation.id) //
.addProperty("reservationNumber", "" + reservation.id) //
.addReservationStatus(status) //
.addUnderName(CoreFactory.newPersonBuilder() //
.addName(reservation.getName()) //
.addEmail(reservation.getEmail()) //
.addTelephone(reservation.getPhone())) //
.addProperty("numSeats", "" + reservation.getNrOfSeats()) //
.addDescription(reservation.getNrOfSeats() + " seats") //
.addReservationFor(event) //
.addTotalPrice(CoreFactory.newPriceSpecificationBuilder() //
.addPriceCurrency(CURRENCY) //
.addPrice(priceToPay.toString()))
.build();
String asJsonLd = getAsJson(eventReservation);
HashMap<String, Object> map = new HashMap<>(new Gson().fromJson(asJsonLd, Map.class));
map.put("jsonLd", asJsonLd);
return map;
}
private static BigDecimal priceToPay(long nrOfSeats, String discount)
{
return ticketPrice.multiply(new BigDecimal(nrOfSeats)).multiply(determinePriceModifier(discount)).setScale(0, RoundingMode.HALF_UP);
}
private static BigDecimal priceToPay(Reservation reservation)
{
return ticketPrice.multiply(new BigDecimal(reservation.getNrOfSeats())).multiply(determinePriceModifier(reservation.getDiscount()))
.setScale(0, RoundingMode.HALF_UP);
}
private void sendStep1ConfirmationEmail(Reservation reservation) throws EmailException
{
		sendEmail(reservation, reservation.getName() + ", your reservation is partially completed", "event-reservation-confirmation-email.ftlh",
"reservations");
}
private void sendReminderEmail(Reservation reservation) throws EmailException
{
sendEmail(reservation, "See you soon @ " + eventName + "!", "reservation-reminder.ftlh", "reminder");
}
private void sendConfirmationEmail(Reservation reservation)
{
sendEmail(reservation, "Payment confirmed for " + eventName, "reservation-confirmation.ftlh", "reminder");
}
private void sendEmail(Reservation reservation, String subject, String template, String category)
{
Email from = new Email("[email protected]", "Malmö Improvisatorium Reservations");
Email to = new Email(reservation.getEmail(), reservation.getName());
String emailText = generateTemplateWithData(template, reservation);
Content content = new Content("text/html", emailText);
Mail mail = new Mail(from, subject, to, content);
String apiKey = loadConfig(CONFIG_KEY_SENDGRID);
SendGrid sg = new SendGrid(apiKey);
Request request = new Request();
try
{
mail.addCategory(category);
request.setMethod(Method.POST);
request.setEndpoint("mail/send");
request.setBody(mail.build());
Response response = sg.api(request);
LOG.info("Returned status code: {}", response.getStatusCode());
LOG.info("Body: {}", response.getBody());
LOG.info("Headers: {}", response.getHeaders());
}
catch(IOException ex)
{
throw new EmailException("Failed to send reservation email to " + reservation.getEmail(), ex);
}
}
static String generateTemplateWithData(String templateName, Reservation reservation) throws EmailException
{
BigDecimal priceToPay = priceToPay(reservation.getNrOfSeats(), reservation.getDiscount());
Map<String, Object> map = reservationInformation(reservation, priceToPay);
try
{
Template temp = cfg.getTemplate(templateName);
StringWriter out = new StringWriter();
temp.process(map, out);
return out.toString();
}
catch(IOException | TemplateException ex)
{
throw new EmailException("Failed to generate reservation email", ex);
}
}
private static final JsonLdSerializer serializer = new JsonLdSerializer(true /* setPrettyPrinting */);
public static String getAsJson(EventReservation reservation)
{
try
{
return serializer.serialize(reservation);
}
catch(JsonLdSyntaxException | JsonIOException e)
{
throw new RuntimeException("Failed to generate schema.org string", e);
}
}
private static BigDecimal determinePriceModifier(String discount)
{
switch(discount)
{
// case "Folk Universitetet":
// return folkUniPricePercentage;
case "MAF-member":
case "Student":
return memberPricePercentage;
case "Normal":
default:
return BigDecimal.ONE;
}
}
@SuppressWarnings("deprecation")
@WebServlet(urlPatterns = "/*", name = "MyUIServlet", asyncSupported = true)
@VaadinServletConfiguration(ui = MyUI.class, productionMode = true)
public static class MyUIServlet extends GAEVaadinServlet
{
private static final long serialVersionUID = 1L;
@Override
public void init(ServletConfig servletConfig) throws ServletException
{
super.init(servletConfig);
// ObjectifyService.init();
// ObjectifyService.init(new
// ObjectifyFactory(DatastoreOptions.newBuilder().setCredentials(GoogleCredentials.getApplicationDefault()).build().getService()));
// ObjectifyService.register(Reservation.class);
// ObjectifyService.register(SeatsRemaining.class);
// ObjectifyService.register(Config.class);
}
@Override
protected void service(HttpServletRequest unwrappedRequest, HttpServletResponse unwrappedResponse) throws ServletException, IOException
{
// System.out.println("Env:" + System.getenv());
// System.out.println("Properties:" + System.getProperties());
super.service(unwrappedRequest, unwrappedResponse);
}
@Override
public void destroy()
{
super.destroy();
// MemcacheServiceFactory.getMemcacheService().clearAll();
}
}
@WebFilter(urlPatterns = "/*", asyncSupported = true)
public static class MyObjectifyFilter extends ObjectifyFilter
{
@Override
public void init(FilterConfig filterConfig) throws ServletException
{
ObjectifyService.register(Reservation.class);
ObjectifyService.register(SeatsRemaining.class);
ObjectifyService.register(Config.class);
try(Closeable closeable = ObjectifyService.begin())
{
ObjectifyService.ofy().transactNew(new VoidWork(){
@Override
public void vrun()
{
LOG.info("Configuring seats");
Objectify ofy = ObjectifyService.ofy();
SeatsRemaining now = ofy.load().key(Key.create(SeatsRemaining.class, "" + EVENT_ID)).now();
if(now == null)
{
ofy.save().entities(new SeatsRemaining().setEventId("" + EVENT_ID).setSeatsRemaining(initialSeatCapacity)).now();
}
String sendgridKey = System.getProperty("config." + CONFIG_KEY_SENDGRID);
if(sendgridKey != null)
{
Config alreadyExists = ofy.load().key(Key.create(Config.class, CONFIG_KEY_SENDGRID)).now();
if(alreadyExists == null)
{
Config config = new Config();
config.setKey(CONFIG_KEY_SENDGRID);
config.setValue(sendgridKey);
ofy.save().entities(config).now();
}
}
}
});
}
}
}
}
| Time Capsule
| src/main/java/com/malmoimprov/MyUI.java | Time Capsule |
|
Java | cc0-1.0 | 549936284293b32ac354859944b18262b1569818 | 0 | ssarvad8/javamon | cat.java | public class cat extends poison
{
private String color;
public cat(String name, int stage, String color1)
{
super (name, stage);
color = color1;
}
public String getColor()
{
return color;
}
public void setColor(String newColor)
{
color = newColor;
}
}
| Delete cat.java | cat.java | Delete cat.java |
||
Java | lgpl-2.1 | b96bbefb2f04334f71b9d6001b15ed1623824f21 | 0 | markmc/rhevm-api,markmc/rhevm-api,markmc/rhevm-api | /*
* Copyright © 2011 Red Hat, Inc.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package com.redhat.rhevm.api.model;
public enum DisplayType {
VNC, SPICE;
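    /* the API representation is lower case (e.g. "vnc"); fromValue() accepts any case */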
public String value() {
return name().toLowerCase();
}
public static DisplayType fromValue(String value) {
try {
return valueOf(value.toUpperCase());
} catch (IllegalArgumentException e) {
return null;
}
}
}
| api/src/main/java/com/redhat/rhevm/api/model/DisplayType.java | /*
* Copyright © 2011 Red Hat, Inc.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package com.redhat.rhevm.api.model;
public enum DisplayType {
VNC, SPICE;
public String value() {
return name();
}
public static DisplayType fromValue(String value) {
try {
return valueOf(value);
} catch (IllegalArgumentException e) {
return null;
}
}
}
| api: make display types lowercase
e.g.
<capabilities>
<version minor="2" major="2">
...
<display_types>
<display_type>vnc</display_type>
<display_type>spice</display_type>
</display_types>
...
</version>
</capabilities>
Acked-by: Geert Jansen <[email protected]>
| api/src/main/java/com/redhat/rhevm/api/model/DisplayType.java | api: make display types lowercase |
|
Java | lgpl-2.1 | 6d58f3d12a80e406c1a6db67f2839af680210854 | 0 | paulklinkenberg/Lucee4,paulklinkenberg/Lucee4,andrew-dixon/Lucee4,jzuijlek/Lucee4,paulklinkenberg/Lucee4,jzuijlek/Lucee4,jzuijlek/Lucee4,paulklinkenberg/Lucee4,jzuijlek/Lucee4,andrew-dixon/Lucee4,paulklinkenberg/Lucee4,andrew-dixon/Lucee4,jzuijlek/Lucee4,andrew-dixon/Lucee4,andrew-dixon/Lucee4 | /**
*
* Copyright (c) 2014, the Railo Company Ltd. All rights reserved.
* Copyright (c) 2015, Lucee Assosication Switzerland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
**/
package lucee.runtime.tag.util;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import lucee.commons.lang.StringUtil;
import lucee.runtime.db.SQL;
import lucee.runtime.db.SQLCaster;
import lucee.runtime.db.SQLImpl;
import lucee.runtime.db.SQLItem;
import lucee.runtime.db.SQLItemImpl;
import lucee.runtime.exp.ApplicationException;
import lucee.runtime.exp.DatabaseException;
import lucee.runtime.exp.PageException;
import lucee.runtime.op.Caster;
import lucee.runtime.op.Decision;
import lucee.runtime.type.Array;
import lucee.runtime.type.ArrayImpl;
import lucee.runtime.type.Collection.Key;
import lucee.runtime.type.Struct;
import lucee.runtime.type.util.KeyConstants;
import lucee.runtime.type.util.ListUtil;
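/**
 * Converts SQL strings that use named (:name) or positional (?) parameters, together with the
 * matching param definitions (given as a Struct or an Array), into a SQL object that only
 * contains positional placeholders with bound SQLItem values.
 */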
public class QueryParamConverter {
public static SQL convert(String sql, Struct params) throws PageException{
Iterator<Entry<Key, Object>> it = params.entryIterator();
ArrayList<SQLItems<NamedSQLItem>> namedItems=new ArrayList<SQLItems<NamedSQLItem>>();
Entry<Key, Object> e;
while(it.hasNext()){
e = it.next();
namedItems.add(toNamedSQLItem(e.getKey().getString(),e.getValue()));
}
return convert(sql, new ArrayList<SQLItems<SQLItem>>(), namedItems);
}
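	/**
	 * Array variant: each element is either a simple value (positional) or a struct with
	 * value/name/type/list options; entries with a name can be referenced as :name in the SQL.
	 */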
public static SQL convert(String sql, Array params) throws PageException{
Iterator<Object> it = params.valueIterator();
ArrayList<SQLItems<NamedSQLItem>> namedItems=new ArrayList<SQLItems<NamedSQLItem>>();
ArrayList<SQLItems<SQLItem>> items=new ArrayList<SQLItems<SQLItem>>();
Object value,paramValue;
while(it.hasNext()){
value = it.next();
if(Decision.isStruct(value)) {
Struct sct=(Struct) value;
// name (optional)
String name=null;
Object oName=sct.get(KeyConstants._name,null);
if(oName!=null) name=Caster.toString(oName);
// value (required)
paramValue=sct.get(KeyConstants._value);
if(StringUtil.isEmpty(name)) {
items.add(new SQLItems<SQLItem>(new SQLItemImpl(paramValue, Types.VARCHAR),sct));
} else {
namedItems.add(new SQLItems<NamedSQLItem>(new NamedSQLItem(name, paramValue, Types.VARCHAR),sct));
}
} else {
items.add(new SQLItems<SQLItem>(new SQLItemImpl(value)));
}
}
return convert(sql, items, namedItems);
}
private static SQLItems<NamedSQLItem> toNamedSQLItem(String name, Object value) throws PageException {
if(Decision.isStruct(value)) {
Struct sct=(Struct) value;
// value (required)
value=sct.get(KeyConstants._value);
return new SQLItems<NamedSQLItem>(new NamedSQLItem(name, value, Types.VARCHAR),sct);
}
return new SQLItems<NamedSQLItem>(new NamedSQLItem(name, value, Types.VARCHAR));
}
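	/**
	 * Walks the SQL string, leaving quoted literals untouched, replaces every named parameter
	 * (:name) with a positional placeholder and expands list params into one placeholder per value.
	 */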
	private static SQL convert(String sql, ArrayList<SQLItems<SQLItem>> items, ArrayList<SQLItems<NamedSQLItem>> namedItems) throws ApplicationException, PageException {
//if(namedParams.size()==0) return new Pair<String, List<Param>>(sql,params);
StringBuilder sb=new StringBuilder();
int sqlLen=sql.length(), initialParamSize=items.size();
char c,del=0;
boolean inside=false;
int qm=0,_qm=0;
for(int i=0;i<sqlLen;i++){
c=sql.charAt(i);
if(c=='"' || c=='\'') {
if(inside) {
if(c==del) {
inside=false;
}
}
else {
del=c;
inside=true;
}
}
else if(!inside) {
if(c=='?') {
if(++_qm>initialParamSize)
throw new ApplicationException("there are more question marks in the SQL than params defined");
}
else if(c==':') {
StringBuilder name=new StringBuilder();
char cc;
int y=i+1;
for(;y<sqlLen;y++){
cc=sql.charAt(y);
if(!isVariableName(cc, true))break;
name.append(cc);
}
if(name.length()>0) {
i=y-1;
c='?';
items.add( qm , get( name.toString(),namedItems ) );
}
}
}
if(c=='?') {
int len=items.get(qm).size();
for(int j=1;j<=len;j++) {
if(j>1)sb.append(',');
sb.append('?');
}
qm++;
} else {
sb.append(c);
}
}
SQLItems<SQLItem> finalItems=flattenItems( items );
return new SQLImpl(sb.toString(),finalItems.toArray(new SQLItem[finalItems.size()]));
}
private static SQLItems<SQLItem> flattenItems( ArrayList<SQLItems<SQLItem>> items ) {
SQLItems<SQLItem> finalItems = new SQLItems<SQLItem>();
Iterator<SQLItems<SQLItem>> listsToFlatten = items.iterator();
while(listsToFlatten.hasNext()){
finalItems.addAll(listsToFlatten.next());
}
return finalItems;
}
public static boolean isVariableName(char c, boolean alsoNumber) {
if((c>='a' && c<='z')||(c>='A' && c<='Z')||(c=='_')) return true;
if(alsoNumber && (c>='0' && c<='9')) return true;
return false;
}
private static SQLItems<SQLItem> get(String name, ArrayList<SQLItems<NamedSQLItem>> items) throws ApplicationException {
Iterator<SQLItems<NamedSQLItem>> it = items.iterator();
SQLItems<NamedSQLItem> item;
while(it.hasNext()){
item=it.next();
if(item.get(0).name.equalsIgnoreCase(name)) {
return item.convertToSQLItems();
}
}
throw new ApplicationException("no param with name ["+name+"] found");
}
private static class NamedSQLItem extends SQLItemImpl {
public final String name;
public NamedSQLItem(String name, Object value, int type){
super(value,type);
this.name=name;
}
public String toString(){
return "{name:"+name+";"+super.toString()+"}";
}
@Override
public NamedSQLItem clone(Object object) {
NamedSQLItem item = new NamedSQLItem(name,object,getType());
item.setNulls(isNulls());
item.setScale(getScale());
return item;
}
}
private static class SQLItems<T extends SQLItem> extends ArrayList<T> {
public SQLItems() {}
public SQLItems(T item) {
add(item);
}
public SQLItems(T item,Struct sct) throws PageException {
T filledItem = fillSQLItem(item,sct);
Object oList = sct.get(KeyConstants._list,null);
if(oList!=null && Caster.toBooleanValue(oList)){
Object oSeparator = sct.get(KeyConstants._separator,null);
String separator=",";
T clonedItem;
if(oSeparator!=null){
separator=Caster.toString(oSeparator);
}
String v = Caster.toString(filledItem.getValue());
Array values = ListUtil.listToArrayRemoveEmpty(v,separator);
int len=values.size();
for(int i=1;i<=len;i++) {
clonedItem = (T) filledItem.clone(values.getE(i));
add(clonedItem);
}
} else {
add(filledItem);
}
}
private SQLItems<SQLItem> convertToSQLItems() {
Iterator<T> it = iterator();
SQLItems<SQLItem> p = new SQLItems<SQLItem>();
while(it.hasNext()){
p.add((SQLItem) it.next());
}
return p;
}
private T fillSQLItem(T item,Struct sct) throws PageException, DatabaseException {
// type (optional)
Object oType=sct.get(KeyConstants._cfsqltype,null);
if(oType==null)oType=sct.get(KeyConstants._sqltype,null);
if(oType==null)oType=sct.get(KeyConstants._type,null);
if(oType!=null) {
item.setType(SQLCaster.toSQLType(Caster.toString(oType)));
}
// nulls (optional)
Object oNulls=sct.get(KeyConstants._nulls,null);
if(oNulls==null)oNulls=sct.get(KeyConstants._null,null);
if(oNulls!=null) {
item.setNulls(Caster.toBooleanValue(oNulls));
}
// scale (optional)
Object oScale=sct.get(KeyConstants._scale,null);
if(oScale!=null) {
item.setScale(Caster.toIntValue(oScale));
}
return item;
}
}
/*
public static void main(String[] args) throws PageException {
List<SQLItem> one=new ArrayList<SQLItem>();
one.add(new SQLItemImpl("aaa",1));
one.add(new SQLItemImpl("bbb",1));
List<NamedSQLItem> two=new ArrayList<NamedSQLItem>();
two.add(new NamedSQLItem("susi","sorglos",1));
two.add(new NamedSQLItem("peter","Petrus",1));
SQL sql = convert(
"select ? as x, 'aa:a' as x from test where a=:susi and b=:peter and c=? and d=:susi",
one,
two);
print.e(sql);
// array with simple values
Array arr=new ArrayImpl();
arr.appendEL("aaa");
arr.appendEL("bbb");
sql = convert(
"select * from test where a=? and b=?",
arr);
print.e(sql);
// array with complex values
arr=new ArrayImpl();
Struct val1=new StructImpl();
val1.set("value", "Susi Sorglos");
Struct val2=new StructImpl();
val2.set("value", "123");
val2.set("type", "integer");
arr.append(val1);
arr.append(val2);
sql = convert(
"select * from test where a=? and b=?",
arr);
print.e(sql);
// array with mixed values
arr.appendEL("ccc");
arr.appendEL("ddd");
sql = convert(
"select * from test where a=? and b=? and c=? and d=?",
arr);
print.e(sql);
// array mixed with named values
Struct val3=new StructImpl();
val3.set("value", "456");
val3.set("type", "integer");
val3.set("name", "susi");
arr.append(val3);
sql = convert(
"select :susi as name from test where a=? and b=? and c=? and d=?",
arr);
print.e(sql);
// struct with simple values
Struct sct=new StructImpl();
sct.set("abc", "Sorglos");
sql = convert(
"select * from test where a=:abc",
sct);
print.e(sql);
// struct with mixed values
sct.set("peter", val1);
sct.set("susi", val3);
sql = convert(
"select :peter as p, :susi as s from test where a=:abc",
sct);
print.e(sql);
}*/
}
| lucee-java/lucee-core/src/lucee/runtime/tag/util/QueryParamConverter.java | /**
*
* Copyright (c) 2014, the Railo Company Ltd. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
**/
package lucee.runtime.tag.util;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import lucee.commons.lang.StringUtil;
import lucee.runtime.db.SQL;
import lucee.runtime.db.SQLCaster;
import lucee.runtime.db.SQLImpl;
import lucee.runtime.db.SQLItem;
import lucee.runtime.db.SQLItemImpl;
import lucee.runtime.exp.ApplicationException;
import lucee.runtime.exp.DatabaseException;
import lucee.runtime.exp.PageException;
import lucee.runtime.op.Caster;
import lucee.runtime.op.Decision;
import lucee.runtime.type.Array;
import lucee.runtime.type.Collection.Key;
import lucee.runtime.type.Struct;
import lucee.runtime.type.util.KeyConstants;
public class QueryParamConverter {
public static SQL convert(String sql, Struct params) throws PageException{
Iterator<Entry<Key, Object>> it = params.entryIterator();
List<NamedSQLItem> namedItems=new ArrayList<NamedSQLItem>();
Entry<Key, Object> e;
while(it.hasNext()){
e = it.next();
namedItems.add(toNamedSQLItem(e.getKey().getString(),e.getValue()));
}
return convert(sql, new ArrayList<SQLItem>(), namedItems);
}
public static SQL convert(String sql, Array params) throws PageException{
Iterator<Object> it = params.valueIterator();
List<NamedSQLItem> namedItems=new ArrayList<NamedSQLItem>();
List<SQLItem> items=new ArrayList<SQLItem>();
Object value;
SQLItem item;
while(it.hasNext()){
value = it.next();
item=toSQLItem(value);
if(item instanceof NamedSQLItem)
namedItems.add((NamedSQLItem) item);
else
items.add(item);
}
return convert(sql, items, namedItems);
}
private static SQLItem toSQLItem(Object value) throws PageException {
if(Decision.isStruct(value)) {
Struct sct=(Struct) value;
// name (optional)
String name=null;
Object oName=sct.get(KeyConstants._name,null);
if(oName!=null) name=Caster.toString(oName);
// value (required)
value=sct.get(KeyConstants._value);
if(StringUtil.isEmpty(name))
return fill(new SQLItemImpl(value, Types.VARCHAR),sct);
return fill(new NamedSQLItem(name, value, Types.VARCHAR),sct);
}
return new SQLItemImpl(value);
}
private static NamedSQLItem toNamedSQLItem(String name, Object value) throws PageException {
if(Decision.isStruct(value)) {
Struct sct=(Struct) value;
// value (required)
value=sct.get(KeyConstants._value);
return (NamedSQLItem) fill(new NamedSQLItem(name, value, Types.VARCHAR),sct);
}
return new NamedSQLItem(name, value, Types.VARCHAR);
}
private static SQLItem fill(SQLItem item,Struct sct) throws DatabaseException, PageException {
// type (optional)
Object oType=sct.get(KeyConstants._cfsqltype,null);
if(oType==null)oType=sct.get(KeyConstants._sqltype,null);
if(oType==null)oType=sct.get(KeyConstants._type,null);
if(oType!=null) {
item.setType(SQLCaster.toSQLType(Caster.toString(oType)));
}
// nulls (optional)
Object oNulls=sct.get(KeyConstants._nulls,null);
if(oNulls==null)oNulls=sct.get(KeyConstants._null,null);
if(oNulls!=null) {
item.setNulls(Caster.toBooleanValue(oNulls));
}
// scale (optional)
Object oScale=sct.get(KeyConstants._scale,null);
if(oScale!=null) {
item.setScale(Caster.toIntValue(oScale));
}
/* list
if(Caster.toBooleanValue(sct.get("list",null),false)) {
String separator=Caster.toString(sct.get("separator",null),",");
String v = Caster.toString(item.getValue());
Array arr=null;
if(StringUtil.isEmpty(v)){
arr=new ArrayImpl();
arr.append("");
}
else arr=ListUtil.listToArrayRemoveEmpty(v,separator);
int len=arr.size();
StringBuilder sb=new StringBuilder();
for(int i=1;i<=len;i++) {
query.setParam(item.clone(check(arr.getE(i))));
if(i>1)sb.append(',');
sb.append('?');
}
write(sb.toString());
}*/
return item;
}
private static SQL convert(String sql, List<SQLItem> items, List<NamedSQLItem> namedItems) throws ApplicationException{
//if(namedParams.size()==0) return new Pair<String, List<Param>>(sql,params);
StringBuilder sb=new StringBuilder();
int sqlLen=sql.length(), initialParamSize=items.size();
char c,del=0;
boolean inside=false;
int qm=0,_qm=0;
for(int i=0;i<sqlLen;i++){
c=sql.charAt(i);
if(c=='"' || c=='\'') {
if(inside) {
if(c==del) {
inside=false;
}
}
else {
del=c;
inside=true;
}
}
else {
if(!inside && c=='?') {
if(++_qm>initialParamSize)
throw new ApplicationException("there are more question marks in the SQL than params defined");
qm++;
}
else if(!inside && c==':') {
StringBuilder name=new StringBuilder();
char cc;
int y=i+1;
for(;y<sqlLen;y++){
cc=sql.charAt(y);
if(!isVariableName(cc, true))break;
name.append(cc);
}
if(name.length()>0) {
i=y-1;
c='?';
SQLItem p = get(name.toString(),namedItems);
items.add(qm, p);
qm++;
}
}
}
sb.append(c);
}
return new SQLImpl(sb.toString(),items.toArray(new SQLItem[items.size()]));
}
public static boolean isVariableName(char c, boolean alsoNumber) {
if((c>='a' && c<='z')||(c>='A' && c<='Z')||(c=='_')) return true;
if(alsoNumber && (c>='0' && c<='9')) return true;
return false;
}
private static SQLItem get(String name, List<NamedSQLItem> items) throws ApplicationException {
Iterator<NamedSQLItem> it = items.iterator();
NamedSQLItem item;
while(it.hasNext()){
item=it.next();
if(item.name.equalsIgnoreCase(name)) return item;
}
throw new ApplicationException("no param with name ["+name+"] found");
}
private static class NamedSQLItem extends SQLItemImpl {
public final String name;
public NamedSQLItem(String name, Object value, int type){
super(value,type);
this.name=name;
}
public String toString(){
return "{name:"+name+";"+super.toString()+"}";
}
}
/*
public static void main(String[] args) throws PageException {
List<SQLItem> one=new ArrayList<SQLItem>();
one.add(new SQLItemImpl("aaa",1));
one.add(new SQLItemImpl("bbb",1));
List<NamedSQLItem> two=new ArrayList<NamedSQLItem>();
two.add(new NamedSQLItem("susi","sorglos",1));
two.add(new NamedSQLItem("peter","Petrus",1));
SQL sql = convert(
"select ? as x, 'aa:a' as x from test where a=:susi and b=:peter and c=? and d=:susi",
one,
two);
print.e(sql);
// array with simple values
Array arr=new ArrayImpl();
arr.appendEL("aaa");
arr.appendEL("bbb");
sql = convert(
"select * from test where a=? and b=?",
arr);
print.e(sql);
// array with complex values
arr=new ArrayImpl();
Struct val1=new StructImpl();
val1.set("value", "Susi Sorglos");
Struct val2=new StructImpl();
val2.set("value", "123");
val2.set("type", "integer");
arr.append(val1);
arr.append(val2);
sql = convert(
"select * from test where a=? and b=?",
arr);
print.e(sql);
// array with mixed values
arr.appendEL("ccc");
arr.appendEL("ddd");
sql = convert(
"select * from test where a=? and b=? and c=? and d=?",
arr);
print.e(sql);
// array mixed with named values
Struct val3=new StructImpl();
val3.set("value", "456");
val3.set("type", "integer");
val3.set("name", "susi");
arr.append(val3);
sql = convert(
"select :susi as name from test where a=? and b=? and c=? and d=?",
arr);
print.e(sql);
// struct with simple values
Struct sct=new StructImpl();
sct.set("abc", "Sorglos");
sql = convert(
"select * from test where a=:abc",
sct);
print.e(sql);
// struct with mixed values
sct.set("peter", val1);
sct.set("susi", val3);
sql = convert(
"select :peter as p, :susi as s from test where a=:abc",
sct);
print.e(sql);
}*/
}
| add testcase for https://luceeserver.atlassian.net/browse/LDEV-24
| lucee-java/lucee-core/src/lucee/runtime/tag/util/QueryParamConverter.java | add testcase for https://luceeserver.atlassian.net/browse/LDEV-24 |
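Editor's note on the commit above (illustrative only, not part of the Lucee sources): the convert() overloads rewrite cfquery-style parameters into JDBC placeholders — positional "?" markers are counted against the supplied items, named ":param" tokens are looked up by name, and both are ignored inside quoted string literals. The standalone sketch below shows only that substitution step, under stated assumptions: the class and method names are hypothetical, the identifier test is looser than isVariableName(), and list expansion and type handling are omitted.

import java.util.ArrayList;
import java.util.List;

public class NamedParamSketch {
    // Replaces :name tokens with '?' outside quoted literals and records the names in order.
    static String toJdbcPlaceholders(String sql, List<String> namesOut) {
        StringBuilder out = new StringBuilder();
        boolean insideLiteral = false;
        char delimiter = 0;
        for (int i = 0; i < sql.length(); i++) {
            char c = sql.charAt(i);
            if (c == '\'' || c == '"') {
                if (insideLiteral && c == delimiter) insideLiteral = false;
                else if (!insideLiteral) { insideLiteral = true; delimiter = c; }
            } else if (!insideLiteral && c == ':') {
                int start = i + 1, end = start;
                while (end < sql.length()
                        && (Character.isLetterOrDigit(sql.charAt(end)) || sql.charAt(end) == '_')) {
                    end++;
                }
                if (end > start) {              // found a named parameter
                    namesOut.add(sql.substring(start, end));
                    out.append('?');
                    i = end - 1;
                    continue;
                }
            }
            out.append(c);
        }
        return out.toString();
    }

    public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        String sql = "select 'a:b' as x from t where a = :susi and b = :peter";
        System.out.println(toJdbcPlaceholders(sql, names)); // select 'a:b' as x from t where a = ? and b = ?
        System.out.println(names);                          // [susi, peter]
    }
}

The real implementation above additionally expands one named or positional item into several '?' markers when the parameter is flagged as a list, which is why it carries SQLItems collections instead of plain strings.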
|
Java | unlicense | dd8d2cda348aa394533abfb1479ef6877c2f6108 | 0 | MinecraftModArchive/Dendrology | package com.scottkillen.mod.dendrology.world.gen.feature;
import com.scottkillen.mod.dendrology.block.ModBlocks;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.world.World;
import java.util.Random;
@SuppressWarnings({ "OverlyComplexBooleanExpression", "MethodWithMoreThanThreeNegations", "MethodWithMultipleLoops" })
public class KiparisTree extends AbstractTree
{
public KiparisTree(boolean isFromSapling)
{
super(isFromSapling);
}
private static boolean inRangeInclusive(int value, int min, int max)
{
return min <= value && value <= max;
}
@Override
protected boolean canBeReplacedByLog(World world, int x, int y, int z)
{
return super.canBeReplacedByLog(world, x, y, z) || world.getBlock(x, y, z).getMaterial().equals(Material.water);
}
@Override
protected Block getLeavesBlock() {return ModBlocks.leaves0;}
@Override
protected int getLeavesMetadata() {return 2;}
@Override
protected Block getLogBlock() {return ModBlocks.logs0;}
@Override
protected int getLogMetadata() {return 3;}
@Override
protected boolean isReplaceable(World world, int x, int y, int z)
{
return super.isReplaceable(world, x, y, z) || world.getBlock(x, y, z).getMaterial().equals(Material.water);
}
@SuppressWarnings("OverlyComplexMethod")
@Override
public boolean generate(World world, Random rand, int x, int y, int z)
{
Random rng = new Random();
rng.setSeed(rand.nextLong());
final int size = 1 + (rng.nextInt(7) < 2 ? 1 : 0) + (rng.nextInt(7) < 2 ? 1 : 0) + (rng.nextInt(2) == 0 ? 1 : 0);
final int height = 4 * size + 1;
if (isPoorGrowthConditions(world, x, y, z, height, ModBlocks.sapling0)) return false;
final Block block = world.getBlock(x, y - 1, z);
block.onPlantGrow(world, x, y - 1, z, x, y, z);
for (int dY = 0; dY <= height; dY++)
{
if (dY != height) placeLog(world, x, y + dY, z);
if (dY >= 1)
{
switch (size)
{
case 1:
genSmallLeaves(world, x, y + dY, z);
break;
case 2:
genMediumLeaves(world, x, y, z, dY);
break;
case 3:
genLargeLeaves(world, x, y, z, dY);
break;
default:
genExtraLargeLeaves(world, x, y, z, dY);
break;
}
}
if (dY == height) placeLeaves(world, x, y + dY + 1, z);
if (dY == height && (size == 4 || size == 3)) placeLeaves(world, x, y + dY + 2, z);
}
return true;
}
@SuppressWarnings("ConstantConditions")
private void genExtraLargeLeaves(World world, int x, int y, int z, int dY)
{
for (int dX = -3; dX <= 3; dX++)
for (int dZ = -3; dZ <= 3; dZ++)
{
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && (Math.abs(dX) != 1 || Math.abs(dZ) != 1))
placeLeaves(world, x + dX, y + dY, z + dZ);
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && dY <= 14 && dY >= 2)
placeLeaves(world, x + dX, y + dY, z + dZ);
if (Math.abs(dX) <= 2 && Math.abs(dZ) <= 2 && (Math.abs(dX) != 2 || Math.abs(dZ) != 2) && dY == 12 || dY == 11 || dY == 3)
placeLeaves(world, x + dX, y + dY, z + dZ);
if ((Math.abs(dX) != 3 || Math.abs(dZ) != 3) && (Math.abs(dX) != 3 || Math.abs(dZ) != 2) && (Math.abs(dX) != 2 || Math.abs(dZ) != 3) && dY <= 10 && dY >= 4)
placeLeaves(world, x + dX, y + dY, z + dZ);
}
}
private void genLargeLeaves(World world, int x, int y, int z, int dY)
{
for (int dX = -2; dX <= 2; dX++)
for (int dZ = -2; dZ <= 2; dZ++)
{
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && (Math.abs(dX) != 1 || Math.abs(dZ) != 1))
placeLeaves(world, x + dX, y + dY, z + dZ);
if ((Math.abs(dX) != 2 || Math.abs(dZ) != 2) && (Math.abs(dX) != 2 || Math.abs(dZ) != 1) && (Math.abs(dX) != 1 || Math.abs(dZ) != 2) && dY <= 10 && dY >= 2)
placeLeaves(world, x + dX, y + dY, z + dZ);
}
}
private void genMediumLeaves(World world, int x, int y, int z, int dY)
{
for (int dX = -2; dX <= 2; dX++)
for (int dZ = -2; dZ <= 2; dZ++)
{
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && (Math.abs(dX) != 1 || Math.abs(dZ) != 1))
{
placeLeaves(world, x + dX, y + dY, z + dZ);
}
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && dY == 7)
{
placeLeaves(world, x + dX, y + 7, z + dZ);
}
if ((Math.abs(dX) != 2 || Math.abs(dZ) != 2) && (Math.abs(dX) != 2 || Math.abs(dZ) != 1) && (Math.abs(dX) != 1 || Math.abs(dZ) != 2) && dY <= 6 && dY >= 2)
{
placeLeaves(world, x + dX, y + dY, z + dZ);
}
}
}
@SuppressWarnings("MethodWithMultipleLoops")
private void genSmallLeaves(World world, int x, int y, int z)
{
for (int dX = -1; dX <= 1; dX++)
for (int dZ = -1; dZ <= 1; dZ++)
if (Math.abs(dX) != 1 || Math.abs(dZ) != 1) placeLeaves(world, x + dX, y, z + dZ);
}
}
| src/main/java/com/scottkillen/mod/dendrology/world/gen/feature/KiparisTree.java | package com.scottkillen.mod.dendrology.world.gen.feature;
import com.scottkillen.mod.dendrology.block.ModBlocks;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.world.World;
import java.util.Random;
@SuppressWarnings({ "OverlyComplexBooleanExpression", "MethodWithMoreThanThreeNegations", "MethodWithMultipleLoops" })
public class KiparisTree extends AbstractTree
{
public KiparisTree(boolean isFromSapling)
{
super(isFromSapling);
}
private static boolean inRangeInclusive(int value, int min, int max)
{
return min <= value && value <= max;
}
@Override
protected boolean canBeReplacedByLog(World world, int x, int y, int z)
{
return super.canBeReplacedByLog(world, x, y, z) || world.getBlock(x, y, z).getMaterial().equals(Material.water);
}
@Override
protected Block getLeavesBlock() {return ModBlocks.leaves0;}
@Override
protected int getLeavesMetadata() {return 2;}
@Override
protected Block getLogBlock() {return ModBlocks.logs0;}
@Override
protected int getLogMetadata() {return 3;}
@Override
protected boolean isReplaceable(World world, int x, int y, int z)
{
return super.isReplaceable(world, x, y, z) || world.getBlock(x, y, z).getMaterial().equals(Material.water);
}
@SuppressWarnings("OverlyComplexMethod")
@Override
public boolean generate(World world, Random random, int i, int j, int k)
{
final int size = 1 + (random.nextInt(7) < 2 ? 1 : 0) + (random.nextInt(7) < 2 ? 1 : 0) + (random.nextInt(2) == 0 ? 1 : 0);
final int height = 4 * size + 1;
if (isPoorGrowthConditions(world, i, j, k, height, ModBlocks.sapling0)) return false;
final Block block = world.getBlock(i, j - 1, k);
block.onPlantGrow(world, i, j - 1, k, i, j, k);
for (int dY = 0; dY <= height; dY++)
{
if (dY != height) placeLog(world, i, j + dY, k);
if (dY >= 1)
{
switch (size)
{
case 1:
genSmallLeaves(world, i, j + dY, k);
break;
case 2:
genMediumLeaves(world, i, j, k, dY);
break;
case 3:
genLargeLeaves(world, i, j, k, dY);
break;
default:
genExtraLargeLeaves(world, i, j, k, dY);
break;
}
}
if (dY == height) placeLeaves(world, i, j + dY + 1, k);
if (dY == height && (size == 4 || size == 3)) placeLeaves(world, i, j + dY + 2, k);
}
return true;
}
@SuppressWarnings("ConstantConditions")
private void genExtraLargeLeaves(World world, int x, int y, int z, int dY)
{
for (int dX = -3; dX <= 3; dX++)
for (int dZ = -3; dZ <= 3; dZ++)
{
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && (Math.abs(dX) != 1 || Math.abs(dZ) != 1))
placeLeaves(world, x + dX, y + dY, z + dZ);
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && dY <= 14 && dY >= 2)
placeLeaves(world, x + dX, y + dY, z + dZ);
if (Math.abs(dX) <= 2 && Math.abs(dZ) <= 2 && (Math.abs(dX) != 2 || Math.abs(dZ) != 2) && dY == 12 || dY == 11 || dY == 3)
placeLeaves(world, x + dX, y + dY, z + dZ);
if ((Math.abs(dX) != 3 || Math.abs(dZ) != 3) && (Math.abs(dX) != 3 || Math.abs(dZ) != 2) && (Math.abs(dX) != 2 || Math.abs(dZ) != 3) && dY <= 10 && dY >= 4)
placeLeaves(world, x + dX, y + dY, z + dZ);
}
}
private void genLargeLeaves(World world, int x, int y, int z, int dY)
{
for (int dX = -2; dX <= 2; dX++)
for (int dZ = -2; dZ <= 2; dZ++)
{
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && (Math.abs(dX) != 1 || Math.abs(dZ) != 1))
placeLeaves(world, x + dX, y + dY, z + dZ);
if ((Math.abs(dX) != 2 || Math.abs(dZ) != 2) && (Math.abs(dX) != 2 || Math.abs(dZ) != 1) && (Math.abs(dX) != 1 || Math.abs(dZ) != 2) && dY <= 10 && dY >= 2)
placeLeaves(world, x + dX, y + dY, z + dZ);
}
}
private void genMediumLeaves(World world, int x, int y, int z, int dY)
{
for (int dX = -2; dX <= 2; dX++)
for (int dZ = -2; dZ <= 2; dZ++)
{
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && (Math.abs(dX) != 1 || Math.abs(dZ) != 1))
{
placeLeaves(world, x + dX, y + dY, z + dZ);
}
if (Math.abs(dX) <= 1 && Math.abs(dZ) <= 1 && dY == 7)
{
placeLeaves(world, x + dX, y + 7, z + dZ);
}
if ((Math.abs(dX) != 2 || Math.abs(dZ) != 2) && (Math.abs(dX) != 2 || Math.abs(dZ) != 1) && (Math.abs(dX) != 1 || Math.abs(dZ) != 2) && dY <= 6 && dY >= 2)
{
placeLeaves(world, x + dX, y + dY, z + dZ);
}
}
}
@SuppressWarnings("MethodWithMultipleLoops")
private void genSmallLeaves(World world, int x, int y, int z)
{
for (int dX = -1; dX <= 1; dX++)
for (int dZ = -1; dZ <= 1; dZ++)
if (Math.abs(dX) != 1 || Math.abs(dZ) != 1) placeLeaves(world, x + dX, y, z + dZ);
}
}
| Fix key pollution
| src/main/java/com/scottkillen/mod/dendrology/world/gen/feature/KiparisTree.java | Fix key pollution |
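Editor's note on "Fix key pollution" (illustrative only, not part of the mod): the new generate() seeds a private Random from a single nextLong() drawn from the shared world-generation Random and performs the size rolls on that private generator — a common way to keep feature-local randomness from perturbing the shared sequence — and renames the coordinate parameters from i/j/k to x/y/z. A minimal sketch of that forking pattern, with hypothetical names:

import java.util.Random;

public class ForkedRandomSketch {
    // Consume exactly one value from the shared generator, then do all feature-local
    // rolls on a private generator seeded from it.
    static int pickTreeSize(Random shared) {
        Random local = new Random();
        local.setSeed(shared.nextLong());
        return 1 + (local.nextInt(7) < 2 ? 1 : 0)
                 + (local.nextInt(7) < 2 ? 1 : 0)
                 + (local.nextInt(2) == 0 ? 1 : 0);
    }

    public static void main(String[] args) {
        Random worldGen = new Random(42L);
        System.out.println(pickTreeSize(worldGen)); // a size between 1 and 4
        // worldGen advanced by exactly one nextLong() call, whatever size was rolled
    }
}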
|
Java | apache-2.0 | f5791bebe85271592329ed9e4abdff3342edd70e | 0 | MatheMatrix/zstack,camilesing/zstack,winger007/zstack,HeathHose/zstack,WangXijue/zstack,HeathHose/zstack,MatheMatrix/zstack,zxwing/zstack-1,zxwing/zstack-1,AlanJinTS/zstack,mingjian2049/zstack,zsyzsyhao/zstack,liningone/zstack,zxwing/zstack-1,AlanJinTS/zstack,zstackio/zstack,zstackio/zstack,mingjian2049/zstack,zsyzsyhao/zstack,hhjuliet/zstack,WangXijue/zstack,zstackorg/zstack,AlanJinTS/zstack,camilesing/zstack,mingjian2049/zstack,Alvin-Lau/zstack,AlanJager/zstack,liningone/zstack,camilesing/zstack,AlanJager/zstack,MaJin1996/zstack,WangXijue/zstack,AlanJager/zstack,hhjuliet/zstack,liningone/zstack,MatheMatrix/zstack,HeathHose/zstack,MaJin1996/zstack,Alvin-Lau/zstack,zstackorg/zstack,winger007/zstack,zstackio/zstack,Alvin-Lau/zstack | package org.zstack.header.network.l2;
import org.zstack.header.message.APIParam;
/**
* @api
* create a l2VlanNetwork
*
* @category l2network
*
* @since 0.1.0
*
* @cli
*
* @httpMsg
* {
"org.zstack.header.network.l2.APICreateL2VlanNetworkMsg": {
"vlan": 10,
"name": "TestL2VlanNetwork",
"description": "Test",
"zoneUuid": "d81c3d3d008e46038b8a38fee595fe41",
"physicalInterface": "eth0",
"type": "L2VlanNetwork",
"session": {
"uuid": "8be9f1f0d55b4f1cb6a088c376dc8128"
}
}
}
*
* @msg
* {
"org.zstack.header.network.l2.APICreateL2VlanNetworkMsg": {
"vlan": 10,
"name": "TestL2VlanNetwork",
"description": "Test",
"zoneUuid": "d81c3d3d008e46038b8a38fee595fe41",
"physicalInterface": "eth0",
"type": "L2VlanNetwork",
"session": {
"uuid": "8be9f1f0d55b4f1cb6a088c376dc8128"
},
"timeout": 1800000,
"id": "a0a5829f12fe4c45855967f6fa0c0afa",
"serviceId": "api.portal"
}
}
*
* @result
* see :ref:`APICreateL2VlanNetworkEvent`
*/
public class APICreateL2VlanNetworkMsg extends APICreateL2NetworkMsg {
/**
* @desc vlan id
*/
@APIParam(numberRange = {0, 4094})
private Integer vlan;
public int getVlan() {
return vlan;
}
public void setVlan(int vlan) {
this.vlan = vlan;
}
@Override
public String getType() {
return L2NetworkConstant.L2_VLAN_NETWORK_TYPE;
}
}
| header/src/main/java/org/zstack/header/network/l2/APICreateL2VlanNetworkMsg.java | package org.zstack.header.network.l2;
import org.zstack.header.message.APIParam;
/**
* @api
* create a l2VlanNetwork
*
* @category l2network
*
* @since 0.1.0
*
* @cli
*
* @httpMsg
* {
"org.zstack.header.network.l2.APICreateL2VlanNetworkMsg": {
"vlan": 10,
"name": "TestL2VlanNetwork",
"description": "Test",
"zoneUuid": "d81c3d3d008e46038b8a38fee595fe41",
"physicalInterface": "eth0",
"type": "L2VlanNetwork",
"session": {
"uuid": "8be9f1f0d55b4f1cb6a088c376dc8128"
}
}
}
*
* @msg
* {
"org.zstack.header.network.l2.APICreateL2VlanNetworkMsg": {
"vlan": 10,
"name": "TestL2VlanNetwork",
"description": "Test",
"zoneUuid": "d81c3d3d008e46038b8a38fee595fe41",
"physicalInterface": "eth0",
"type": "L2VlanNetwork",
"session": {
"uuid": "8be9f1f0d55b4f1cb6a088c376dc8128"
},
"timeout": 1800000,
"id": "a0a5829f12fe4c45855967f6fa0c0afa",
"serviceId": "api.portal"
}
}
*
* @result
* see :ref:`APICreateL2VlanNetworkEvent`
*/
public class APICreateL2VlanNetworkMsg extends APICreateL2NetworkMsg {
/**
* @desc vlan id
*/
@APIParam(numberRange = {0, 4095})
private Integer vlan;
public int getVlan() {
return vlan;
}
public void setVlan(int vlan) {
this.vlan = vlan;
}
@Override
public String getType() {
return L2NetworkConstant.L2_VLAN_NETWORK_TYPE;
}
}
| vlan 4095 is not supported
for https://github.com/zxwing/premium/issues/160
| header/src/main/java/org/zstack/header/network/l2/APICreateL2VlanNetworkMsg.java | vlan 4095 is not supported |
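Editor's note on the commit above (illustrative only, not part of the zstack sources): the accepted range shrinks from {0, 4095} to {0, 4094} because the IEEE 802.1Q VID field is 12 bits wide and the value 4095 (0xFFF) is reserved, so it cannot be assigned to a user VLAN. Below is a standalone check equivalent to what the @APIParam(numberRange = {0, 4094}) annotation enforces; the class and method names are hypothetical.

public class VlanRangeSketch {
    static void validateVlan(int vlan) {
        if (vlan < 0 || vlan > 4094) {
            throw new IllegalArgumentException(
                    "vlan must be in [0, 4094]; 4095 is reserved by IEEE 802.1Q, got " + vlan);
        }
    }

    public static void main(String[] args) {
        validateVlan(10);          // accepted, matches the httpMsg sample above
        try {
            validateVlan(4095);    // rejected after this commit
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}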
|
Java | apache-2.0 | 32ecd9c3d430e5a1a3cab757126f124d3f5ad10f | 0 | AndroidX/androidx,AndroidX/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,aosp-mirror/platform_frameworks_support,androidx/androidx,AndroidX/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,androidx/androidx | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package android.support.v17.leanback.app;
import android.support.v17.leanback.R;
import android.animation.Animator;
import android.animation.ValueAnimator;
import android.app.Activity;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.os.Handler;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
/**
* Supports background image continuity between multiple Activities.
*
* <p>An Activity should instantiate a BackgroundManager and {@link #attach}
* to the Activity's window. When the Activity is started, the background is
* initialized to the current background values stored in a continuity service.
* The background continuity service is updated as the background is updated.
*
* <p>At some point, for example when it is stopped, the Activity may release
* its background state.
*
* <p>When an Activity is resumed, if the BackgroundManager has not been
* released, the continuity service is updated from the BackgroundManager state.
* If the BackgroundManager was released, the BackgroundManager inherits the
* current state from the continuity service.
*
* <p>When the last Activity is destroyed, the background state is reset.
*
* <p>Backgrounds consist of several layers, from back to front:
* <ul>
* <li>the background Drawable of the theme</li>
* <li>a solid color (set via {@link #setColor})</li>
* <li>two Drawables, previous and current (set via {@link #setBitmap} or
* {@link #setDrawable}), which may be in transition</li>
* </ul>
*
* <p>BackgroundManager holds references to potentially large bitmap Drawables.
* Call {@link #release} to release these references when the Activity is not
* visible.
*/
// TODO: support for multiple app processes requires a proper android service
// instead of the shared memory "service" implemented here. Such a service could
// support continuity between fragments of different applications if desired.
public final class BackgroundManager {
private static final String TAG = "BackgroundManager";
private static final boolean DEBUG = false;
private static final int FULL_ALPHA = 255;
private static final int DIM_ALPHA_ON_SOLID = (int) (0.8f * FULL_ALPHA);
private static final int CHANGE_BG_DELAY_MS = 500;
private static final int FADE_DURATION = 500;
/**
* Using a separate window for backgrounds can improve graphics performance by
* leveraging hardware display layers.
* TODO: support a leanback configuration option.
*/
private static final boolean USE_SEPARATE_WINDOW = false;
private static final String WINDOW_NAME = "BackgroundManager";
private static final String FRAGMENT_TAG = BackgroundManager.class.getCanonicalName();
private Context mContext;
private Handler mHandler;
private Window mWindow;
private WindowManager mWindowManager;
private View mBgView;
private BackgroundContinuityService mService;
private int mThemeDrawableResourceId;
private int mHeightPx;
private int mWidthPx;
private Drawable mBackgroundDrawable;
private int mBackgroundColor;
private boolean mAttached;
private static class BitmapDrawable extends Drawable {
static class ConstantState extends Drawable.ConstantState {
Bitmap mBitmap;
Matrix mMatrix;
Paint mPaint;
@Override
public Drawable newDrawable() {
return new BitmapDrawable(null, mBitmap, mMatrix);
}
@Override
public int getChangingConfigurations() {
return 0;
}
}
private ConstantState mState = new ConstantState();
BitmapDrawable(Resources resources, Bitmap bitmap) {
this(resources, bitmap, null);
}
BitmapDrawable(Resources resources, Bitmap bitmap, Matrix matrix) {
mState.mBitmap = bitmap;
mState.mMatrix = matrix != null ? matrix : new Matrix();
mState.mPaint = new Paint();
mState.mPaint.setFilterBitmap(true);
}
Bitmap getBitmap() {
return mState.mBitmap;
}
@Override
public void draw(Canvas canvas) {
if (mState.mBitmap == null) {
return;
}
canvas.drawBitmap(mState.mBitmap, mState.mMatrix, mState.mPaint);
}
@Override
public int getOpacity() {
return android.graphics.PixelFormat.OPAQUE;
}
@Override
public void setAlpha(int alpha) {
if (mState.mPaint.getAlpha() != alpha) {
mState.mPaint.setAlpha(alpha);
invalidateSelf();
}
}
@Override
public void setColorFilter(ColorFilter cf) {
// Abstract in Drawable, not implemented
}
@Override
public ConstantState getConstantState() {
return mState;
}
}
private static class DrawableWrapper {
protected int mAlpha;
protected Drawable mDrawable;
protected ValueAnimator mAnimator;
protected boolean mAnimationPending;
private final Interpolator mInterpolator = new LinearInterpolator();
private final ValueAnimator.AnimatorUpdateListener mAnimationUpdateListener =
new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
setAlpha((Integer) animation.getAnimatedValue());
}
};
public DrawableWrapper(Drawable drawable) {
mDrawable = drawable;
setAlpha(FULL_ALPHA);
}
public Drawable getDrawable() {
return mDrawable;
}
public void setAlpha(int alpha) {
mAlpha = alpha;
mDrawable.setAlpha(alpha);
}
public int getAlpha() {
return mAlpha;
}
public void setColor(int color) {
((ColorDrawable) mDrawable).setColor(color);
}
public void fadeIn(int durationMs, int delayMs) {
fade(durationMs, delayMs, FULL_ALPHA);
}
public void fadeOut(int durationMs) {
fade(durationMs, 0, 0);
}
public void fade(int durationMs, int delayMs, int alpha) {
if (mAnimator != null && mAnimator.isStarted()) {
mAnimator.cancel();
}
mAnimator = ValueAnimator.ofInt(getAlpha(), alpha);
mAnimator.addUpdateListener(mAnimationUpdateListener);
mAnimator.setInterpolator(mInterpolator);
mAnimator.setDuration(durationMs);
mAnimator.setStartDelay(delayMs);
mAnimationPending = true;
}
public boolean isAnimationPending() {
return mAnimationPending;
}
public boolean isAnimationStarted() {
return mAnimator != null && mAnimator.isStarted();
}
public void startAnimation() {
startAnimation(null);
}
public void startAnimation(Animator.AnimatorListener listener) {
if (listener != null) {
mAnimator.addListener(listener);
}
mAnimator.start();
mAnimationPending = false;
}
}
private LayerDrawable mLayerDrawable;
private DrawableWrapper mLayerWrapper;
private DrawableWrapper mImageInWrapper;
private DrawableWrapper mImageOutWrapper;
private DrawableWrapper mColorWrapper;
private DrawableWrapper mDimWrapper;
private Drawable mThemeDrawable;
private ChangeBackgroundRunnable mChangeRunnable;
/**
* Shared memory continuity service.
*/
private static class BackgroundContinuityService {
private static final String TAG = "BackgroundContinuityService";
private static boolean DEBUG = BackgroundManager.DEBUG;
private static BackgroundContinuityService sService = new BackgroundContinuityService();
private int mColor;
private Drawable mDrawable;
private int mCount;
private BackgroundContinuityService() {
reset();
}
private void reset() {
mColor = Color.TRANSPARENT;
mDrawable = null;
}
public static BackgroundContinuityService getInstance() {
final int count = sService.mCount++;
if (DEBUG) Log.v(TAG, "Returning instance with new count " + count);
return sService;
}
public void unref() {
if (mCount <= 0) throw new IllegalStateException("Can't unref, count " + mCount);
if (--mCount == 0) {
if (DEBUG) Log.v(TAG, "mCount is zero, resetting");
reset();
}
}
public int getColor() {
return mColor;
}
public Drawable getDrawable() {
return mDrawable;
}
public void setColor(int color) {
mColor = color;
}
public void setDrawable(Drawable drawable) {
mDrawable = drawable;
}
}
private Drawable getThemeDrawable() {
Drawable drawable = null;
if (mThemeDrawableResourceId != -1) {
drawable = mContext.getResources().getDrawable(mThemeDrawableResourceId);
}
if (drawable == null) {
drawable = createEmptyDrawable();
}
return drawable;
}
/**
* Get the BackgroundManager associated with the Activity.
* <p>
* The BackgroundManager will be created on-demand for each individual
* Activity. Subsequent calls will return the same BackgroundManager created
* for this Activity.
*/
public static BackgroundManager getInstance(Activity activity) {
BackgroundFragment fragment = (BackgroundFragment) activity.getFragmentManager()
.findFragmentByTag(FRAGMENT_TAG);
if (fragment != null) {
BackgroundManager manager = fragment.getBackgroundManager();
if (manager != null) {
return manager;
}
// manager is null: this is a fragment restored by FragmentManager,
// fall through to create a BackgroundManager attach to it.
}
return new BackgroundManager(activity);
}
/**
     * Construct a BackgroundManager instance. The initial background is set
* from the continuity service.
* @deprecated Use getInstance(Activity).
*/
@Deprecated
public BackgroundManager(Activity activity) {
mContext = activity;
mService = BackgroundContinuityService.getInstance();
mHeightPx = mContext.getResources().getDisplayMetrics().heightPixels;
mWidthPx = mContext.getResources().getDisplayMetrics().widthPixels;
mHandler = new Handler();
TypedArray ta = activity.getTheme().obtainStyledAttributes(new int[] {
android.R.attr.windowBackground });
mThemeDrawableResourceId = ta.getResourceId(0, -1);
if (mThemeDrawableResourceId < 0) {
if (DEBUG) Log.v(TAG, "BackgroundManager no window background resource!");
}
ta.recycle();
createFragment(activity);
}
private void createFragment(Activity activity) {
// Use a fragment to ensure the background manager gets detached properly.
BackgroundFragment fragment = (BackgroundFragment) activity.getFragmentManager()
.findFragmentByTag(FRAGMENT_TAG);
if (fragment == null) {
fragment = new BackgroundFragment();
activity.getFragmentManager().beginTransaction().add(fragment, FRAGMENT_TAG).commit();
} else {
if (fragment.getBackgroundManager() != null) {
throw new IllegalStateException("Created duplicated BackgroundManager for same " +
"activity, please use getInstance() instead");
}
}
fragment.setBackgroundManager(this);
}
/**
* Synchronizes state when the owning Activity is resumed.
*/
void onActivityResume() {
if (mService == null) {
return;
}
if (mLayerDrawable == null) {
if (DEBUG) Log.v(TAG, "onActivityResume " + this +
" released state, syncing with service");
syncWithService();
} else {
if (DEBUG) Log.v(TAG, "onActivityResume " + this + " updating service color "
+ mBackgroundColor + " drawable " + mBackgroundDrawable);
mService.setColor(mBackgroundColor);
mService.setDrawable(mBackgroundDrawable);
}
}
private void syncWithService() {
int color = mService.getColor();
Drawable drawable = mService.getDrawable();
if (DEBUG) Log.v(TAG, "syncWithService color " + Integer.toHexString(color)
+ " drawable " + drawable);
mBackgroundColor = color;
mBackgroundDrawable = drawable == null ? null :
drawable.getConstantState().newDrawable().mutate();
updateImmediate();
}
private void lazyInit() {
if (mLayerDrawable != null) {
return;
}
mLayerDrawable = (LayerDrawable) mContext.getResources().getDrawable(
R.drawable.lb_background).mutate();
mBgView.setBackground(mLayerDrawable);
mLayerDrawable.setDrawableByLayerId(R.id.background_imageout, createEmptyDrawable());
mDimWrapper = new DrawableWrapper(
mLayerDrawable.findDrawableByLayerId(R.id.background_dim));
mLayerWrapper = new DrawableWrapper(mLayerDrawable);
mColorWrapper = new DrawableWrapper(
mLayerDrawable.findDrawableByLayerId(R.id.background_color));
}
/**
* Make the background visible on the given Window.
*/
public void attach(Window window) {
if (USE_SEPARATE_WINDOW) {
attachBehindWindow(window);
} else {
attachToView(window.getDecorView());
}
}
private void attachBehindWindow(Window window) {
if (DEBUG) Log.v(TAG, "attachBehindWindow " + window);
mWindow = window;
mWindowManager = window.getWindowManager();
WindowManager.LayoutParams params = new WindowManager.LayoutParams(
// Media window sits behind the main application window
WindowManager.LayoutParams.TYPE_APPLICATION_MEDIA,
// Avoid default to software format RGBA
WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED,
android.graphics.PixelFormat.TRANSLUCENT);
params.setTitle(WINDOW_NAME);
params.width = ViewGroup.LayoutParams.MATCH_PARENT;
params.height = ViewGroup.LayoutParams.MATCH_PARENT;
View backgroundView = LayoutInflater.from(mContext).inflate(
R.layout.lb_background_window, null);
mWindowManager.addView(backgroundView, params);
attachToView(backgroundView);
}
private void attachToView(View sceneRoot) {
mBgView = sceneRoot;
mAttached = true;
syncWithService();
}
/**
* Release references to Drawables and put the BackgroundManager into the
* detached state. Called when the associated Activity is destroyed.
* @hide
*/
void detach() {
if (DEBUG) Log.v(TAG, "detach " + this);
release();
if (mWindowManager != null && mBgView != null) {
mWindowManager.removeViewImmediate(mBgView);
}
mWindowManager = null;
mWindow = null;
mBgView = null;
mAttached = false;
if (mService != null) {
mService.unref();
mService = null;
}
}
/**
* Release references to Drawables. Typically called to reduce memory
* overhead when not visible.
* <p>
* When an Activity is resumed, if the BackgroundManager has not been
* released, the continuity service is updated from the BackgroundManager
* state. If the BackgroundManager was released, the BackgroundManager
* inherits the current state from the continuity service.
*/
public void release() {
if (DEBUG) Log.v(TAG, "release " + this);
if (mLayerDrawable != null) {
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, createEmptyDrawable());
mLayerDrawable.setDrawableByLayerId(R.id.background_imageout, createEmptyDrawable());
mLayerDrawable = null;
}
mLayerWrapper = null;
mImageInWrapper = null;
mImageOutWrapper = null;
mColorWrapper = null;
mDimWrapper = null;
mThemeDrawable = null;
if (mChangeRunnable != null) {
mChangeRunnable.cancel();
mChangeRunnable = null;
}
releaseBackgroundBitmap();
}
private void releaseBackgroundBitmap() {
mBackgroundDrawable = null;
}
private void updateImmediate() {
lazyInit();
mColorWrapper.setColor(mBackgroundColor);
if (mDimWrapper != null) {
mDimWrapper.setAlpha(mBackgroundColor == Color.TRANSPARENT ? 0 : DIM_ALPHA_ON_SOLID);
}
showWallpaper(mBackgroundColor == Color.TRANSPARENT);
mThemeDrawable = getThemeDrawable();
mLayerDrawable.setDrawableByLayerId(R.id.background_theme, mThemeDrawable);
if (mBackgroundDrawable == null) {
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, createEmptyDrawable());
} else {
if (DEBUG) Log.v(TAG, "Background drawable is available");
mImageInWrapper = new DrawableWrapper(mBackgroundDrawable);
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, mBackgroundDrawable);
if (mDimWrapper != null) {
mDimWrapper.setAlpha(FULL_ALPHA);
}
}
}
/**
* Set the background to the given color. The timing for when this becomes
* visible in the app is undefined and may take place after a small delay.
*/
public void setColor(int color) {
if (DEBUG) Log.v(TAG, "setColor " + Integer.toHexString(color));
mBackgroundColor = color;
mService.setColor(mBackgroundColor);
if (mColorWrapper != null) {
mColorWrapper.setColor(mBackgroundColor);
}
}
/**
* Set the given drawable into the background. The provided Drawable will be
* used unmodified as the background, without any scaling or cropping
* applied to it. The timing for when this becomes visible in the app is
* undefined and may take place after a small delay.
*/
public void setDrawable(Drawable drawable) {
if (DEBUG) Log.v(TAG, "setBackgroundDrawable " + drawable);
setDrawableInternal(drawable);
}
private void setDrawableInternal(Drawable drawable) {
if (!mAttached) {
throw new IllegalStateException("Must attach before setting background drawable");
}
if (mChangeRunnable != null) {
mChangeRunnable.cancel();
}
mChangeRunnable = new ChangeBackgroundRunnable(drawable);
if (mImageInWrapper != null && mImageInWrapper.isAnimationStarted()) {
if (DEBUG) Log.v(TAG, "animation in progress");
} else {
mHandler.postDelayed(mChangeRunnable, CHANGE_BG_DELAY_MS);
}
}
/**
* Set the given bitmap into the background. When using setBitmap to set the
* background, the provided bitmap will be scaled and cropped to correctly
* fit within the dimensions of the view. The timing for when this becomes
* visible in the app is undefined and may take place after a small delay.
*/
public void setBitmap(Bitmap bitmap) {
if (DEBUG) {
Log.v(TAG, "setBitmap " + bitmap);
}
if (bitmap == null) {
setDrawableInternal(null);
return;
}
if (bitmap.getWidth() <= 0 || bitmap.getHeight() <= 0) {
if (DEBUG) {
Log.v(TAG, "invalid bitmap width or height");
}
return;
}
Matrix matrix = null;
if ((bitmap.getWidth() != mWidthPx || bitmap.getHeight() != mHeightPx)) {
int dwidth = bitmap.getWidth();
int dheight = bitmap.getHeight();
float scale;
// Scale proportionately to fit width and height.
if (dwidth * mHeightPx > mWidthPx * dheight) {
scale = (float) mHeightPx / (float) dheight;
} else {
scale = (float) mWidthPx / (float) dwidth;
}
int subX = Math.min((int) (mWidthPx / scale), dwidth);
int dx = Math.max(0, (dwidth - subX) / 2);
matrix = new Matrix();
matrix.setScale(scale, scale);
matrix.preTranslate(-dx, 0);
if (DEBUG) Log.v(TAG, "original image size " + bitmap.getWidth() + "x" + bitmap.getHeight() +
" scale " + scale + " dx " + dx);
}
BitmapDrawable bitmapDrawable = new BitmapDrawable(mContext.getResources(), bitmap, matrix);
setDrawableInternal(bitmapDrawable);
}
private void applyBackgroundChanges() {
if (!mAttached || mLayerWrapper == null) {
return;
}
if (DEBUG) Log.v(TAG, "applyBackgroundChanges drawable " + mBackgroundDrawable);
int dimAlpha = 0;
if (mImageOutWrapper != null && mImageOutWrapper.isAnimationPending()) {
if (DEBUG) Log.v(TAG, "mImageOutWrapper animation starting");
mImageOutWrapper.startAnimation();
mImageOutWrapper = null;
dimAlpha = DIM_ALPHA_ON_SOLID;
}
if (mImageInWrapper == null && mBackgroundDrawable != null) {
if (DEBUG) Log.v(TAG, "creating new imagein drawable");
mImageInWrapper = new DrawableWrapper(mBackgroundDrawable);
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, mBackgroundDrawable);
if (DEBUG) Log.v(TAG, "mImageInWrapper animation starting");
mImageInWrapper.setAlpha(0);
mImageInWrapper.fadeIn(FADE_DURATION, 0);
mImageInWrapper.startAnimation(mImageInListener);
dimAlpha = FULL_ALPHA;
}
if (mDimWrapper != null && dimAlpha != 0) {
if (DEBUG) Log.v(TAG, "dimwrapper animation starting to " + dimAlpha);
mDimWrapper.fade(FADE_DURATION, 0, dimAlpha);
mDimWrapper.startAnimation();
}
}
private final Animator.AnimatorListener mImageInListener = new Animator.AnimatorListener() {
@Override
public void onAnimationStart(Animator animation) {
}
@Override
public void onAnimationRepeat(Animator animation) {
}
@Override
public void onAnimationEnd(Animator animation) {
if (mChangeRunnable != null) {
if (DEBUG) Log.v(TAG, "animation ended, found change runnable");
mChangeRunnable.run();
}
}
@Override
public void onAnimationCancel(Animator animation) {
}
};
/**
* Returns the current background color.
*/
public final int getColor() {
return mBackgroundColor;
}
/**
* Returns the current background {@link Drawable}.
*/
public Drawable getDrawable() {
return mBackgroundDrawable;
}
private boolean sameDrawable(Drawable first, Drawable second) {
if (first == null || second == null) {
return false;
}
if (first == second) {
return true;
}
if (first instanceof BitmapDrawable && second instanceof BitmapDrawable) {
if (((BitmapDrawable) first).getBitmap().sameAs(((BitmapDrawable) second).getBitmap())) {
return true;
}
}
return false;
}
/**
* Task which changes the background.
*/
class ChangeBackgroundRunnable implements Runnable {
private Drawable mDrawable;
private boolean mCancel;
ChangeBackgroundRunnable(Drawable drawable) {
mDrawable = drawable;
}
public void cancel() {
mCancel = true;
}
@Override
public void run() {
if (!mCancel) {
runTask();
}
}
private void runTask() {
lazyInit();
if (sameDrawable(mDrawable, mBackgroundDrawable)) {
if (DEBUG) Log.v(TAG, "same bitmap detected");
return;
}
releaseBackgroundBitmap();
if (mImageInWrapper != null) {
mImageOutWrapper = new DrawableWrapper(mImageInWrapper.getDrawable());
mImageOutWrapper.setAlpha(mImageInWrapper.getAlpha());
mImageOutWrapper.fadeOut(FADE_DURATION);
// Order is important! Setting a drawable "removes" the
// previous one from the view
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, createEmptyDrawable());
mLayerDrawable.setDrawableByLayerId(R.id.background_imageout,
mImageOutWrapper.getDrawable());
mImageInWrapper.setAlpha(0);
mImageInWrapper = null;
}
mBackgroundDrawable = mDrawable;
mService.setDrawable(mBackgroundDrawable);
applyBackgroundChanges();
mChangeRunnable = null;
}
}
private Drawable createEmptyDrawable() {
Bitmap bitmap = null;
return new BitmapDrawable(mContext.getResources(), bitmap);
}
private void showWallpaper(boolean show) {
if (mWindow == null) {
return;
}
WindowManager.LayoutParams layoutParams = mWindow.getAttributes();
if (show) {
if ((layoutParams.flags & WindowManager.LayoutParams.FLAG_SHOW_WALLPAPER) != 0) {
return;
}
if (DEBUG) Log.v(TAG, "showing wallpaper");
layoutParams.flags |= WindowManager.LayoutParams.FLAG_SHOW_WALLPAPER;
} else {
if ((layoutParams.flags & WindowManager.LayoutParams.FLAG_SHOW_WALLPAPER) == 0) {
return;
}
if (DEBUG) Log.v(TAG, "hiding wallpaper");
layoutParams.flags &= ~WindowManager.LayoutParams.FLAG_SHOW_WALLPAPER;
}
mWindow.setAttributes(layoutParams);
}
}
| v17/leanback/src/android/support/v17/leanback/app/BackgroundManager.java | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package android.support.v17.leanback.app;
import android.support.v17.leanback.R;
import android.animation.Animator;
import android.animation.ObjectAnimator;
import android.app.Activity;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.os.Handler;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.view.animation.LinearInterpolator;
/**
* Supports background image continuity between multiple Activities.
*
* <p>An Activity should instantiate a BackgroundManager and {@link #attach}
* to the Activity's window. When the Activity is started, the background is
* initialized to the current background values stored in a continuity service.
* The background continuity service is updated as the background is updated.
*
* <p>At some point, for example when it is stopped, the Activity may release
* its background state.
*
* <p>When an Activity is resumed, if the BackgroundManager has not been
* released, the continuity service is updated from the BackgroundManager state.
* If the BackgroundManager was released, the BackgroundManager inherits the
* current state from the continuity service.
*
* <p>When the last Activity is destroyed, the background state is reset.
*
* <p>Backgrounds consist of several layers, from back to front:
* <ul>
* <li>the background Drawable of the theme</li>
* <li>a solid color (set via {@link #setColor})</li>
* <li>two Drawables, previous and current (set via {@link #setBitmap} or
* {@link #setDrawable}), which may be in transition</li>
* </ul>
*
* <p>BackgroundManager holds references to potentially large bitmap Drawables.
* Call {@link #release} to release these references when the Activity is not
* visible.
*/
// TODO: support for multiple app processes requires a proper android service
// instead of the shared memory "service" implemented here. Such a service could
// support continuity between fragments of different applications if desired.
public final class BackgroundManager {
private static final String TAG = "BackgroundManager";
private static final boolean DEBUG = false;
private static final int FULL_ALPHA = 255;
private static final int DIM_ALPHA_ON_SOLID = (int) (0.8f * FULL_ALPHA);
private static final int CHANGE_BG_DELAY_MS = 500;
private static final int FADE_DURATION = 500;
/**
* Using a separate window for backgrounds can improve graphics performance by
* leveraging hardware display layers.
* TODO: support a leanback configuration option.
*/
private static final boolean USE_SEPARATE_WINDOW = false;
private static final String WINDOW_NAME = "BackgroundManager";
private static final String FRAGMENT_TAG = BackgroundManager.class.getCanonicalName();
private Context mContext;
private Handler mHandler;
private Window mWindow;
private WindowManager mWindowManager;
private View mBgView;
private BackgroundContinuityService mService;
private int mThemeDrawableResourceId;
private int mHeightPx;
private int mWidthPx;
private Drawable mBackgroundDrawable;
private int mBackgroundColor;
private boolean mAttached;
private static class BitmapDrawable extends Drawable {
static class ConstantState extends Drawable.ConstantState {
Bitmap mBitmap;
Matrix mMatrix;
Paint mPaint;
@Override
public Drawable newDrawable() {
return new BitmapDrawable(null, mBitmap, mMatrix);
}
@Override
public int getChangingConfigurations() {
return 0;
}
}
private ConstantState mState = new ConstantState();
BitmapDrawable(Resources resources, Bitmap bitmap) {
this(resources, bitmap, null);
}
BitmapDrawable(Resources resources, Bitmap bitmap, Matrix matrix) {
mState.mBitmap = bitmap;
mState.mMatrix = matrix != null ? matrix : new Matrix();
mState.mPaint = new Paint();
mState.mPaint.setFilterBitmap(true);
}
Bitmap getBitmap() {
return mState.mBitmap;
}
@Override
public void draw(Canvas canvas) {
if (mState.mBitmap == null) {
return;
}
canvas.drawBitmap(mState.mBitmap, mState.mMatrix, mState.mPaint);
}
@Override
public int getOpacity() {
return android.graphics.PixelFormat.OPAQUE;
}
@Override
public void setAlpha(int alpha) {
if (mState.mPaint.getAlpha() != alpha) {
mState.mPaint.setAlpha(alpha);
invalidateSelf();
}
}
@Override
public void setColorFilter(ColorFilter cf) {
// Abstract in Drawable, not implemented
}
@Override
public ConstantState getConstantState() {
return mState;
}
}
private static class DrawableWrapper {
protected int mAlpha;
protected Drawable mDrawable;
protected ObjectAnimator mAnimator;
protected boolean mAnimationPending;
public DrawableWrapper(Drawable drawable) {
mDrawable = drawable;
setAlpha(FULL_ALPHA);
}
public Drawable getDrawable() {
return mDrawable;
}
public void setAlpha(int alpha) {
mAlpha = alpha;
mDrawable.setAlpha(alpha);
}
public int getAlpha() {
return mAlpha;
}
public void setColor(int color) {
((ColorDrawable) mDrawable).setColor(color);
}
public void fadeIn(int durationMs, int delayMs) {
fade(durationMs, delayMs, FULL_ALPHA);
}
public void fadeOut(int durationMs) {
fade(durationMs, 0, 0);
}
public void fade(int durationMs, int delayMs, int alpha) {
if (mAnimator != null && mAnimator.isStarted()) {
mAnimator.cancel();
}
mAnimator = ObjectAnimator.ofInt(this, "alpha", alpha);
mAnimator.setInterpolator(new LinearInterpolator());
mAnimator.setDuration(durationMs);
mAnimator.setStartDelay(delayMs);
mAnimationPending = true;
}
public boolean isAnimationPending() {
return mAnimationPending;
}
public boolean isAnimationStarted() {
return mAnimator != null && mAnimator.isStarted();
}
public void startAnimation() {
startAnimation(null);
}
public void startAnimation(Animator.AnimatorListener listener) {
if (listener != null) {
mAnimator.addListener(listener);
}
mAnimator.start();
mAnimationPending = false;
}
}
private LayerDrawable mLayerDrawable;
private DrawableWrapper mLayerWrapper;
private DrawableWrapper mImageInWrapper;
private DrawableWrapper mImageOutWrapper;
private DrawableWrapper mColorWrapper;
private DrawableWrapper mDimWrapper;
private Drawable mThemeDrawable;
private ChangeBackgroundRunnable mChangeRunnable;
/**
* Shared memory continuity service.
*/
private static class BackgroundContinuityService {
private static final String TAG = "BackgroundContinuityService";
private static boolean DEBUG = BackgroundManager.DEBUG;
private static BackgroundContinuityService sService = new BackgroundContinuityService();
private int mColor;
private Drawable mDrawable;
private int mCount;
private BackgroundContinuityService() {
reset();
}
private void reset() {
mColor = Color.TRANSPARENT;
mDrawable = null;
}
public static BackgroundContinuityService getInstance() {
final int count = sService.mCount++;
if (DEBUG) Log.v(TAG, "Returning instance with new count " + count);
return sService;
}
public void unref() {
if (mCount <= 0) throw new IllegalStateException("Can't unref, count " + mCount);
if (--mCount == 0) {
if (DEBUG) Log.v(TAG, "mCount is zero, resetting");
reset();
}
}
public int getColor() {
return mColor;
}
public Drawable getDrawable() {
return mDrawable;
}
public void setColor(int color) {
mColor = color;
}
public void setDrawable(Drawable drawable) {
mDrawable = drawable;
}
}
private Drawable getThemeDrawable() {
Drawable drawable = null;
if (mThemeDrawableResourceId != -1) {
drawable = mContext.getResources().getDrawable(mThemeDrawableResourceId);
}
if (drawable == null) {
drawable = createEmptyDrawable();
}
return drawable;
}
/**
* Get the BackgroundManager associated with the Activity.
* <p>
* The BackgroundManager will be created on-demand for each individual
* Activity. Subsequent calls will return the same BackgroundManager created
* for this Activity.
*/
public static BackgroundManager getInstance(Activity activity) {
BackgroundFragment fragment = (BackgroundFragment) activity.getFragmentManager()
.findFragmentByTag(FRAGMENT_TAG);
if (fragment != null) {
BackgroundManager manager = fragment.getBackgroundManager();
if (manager != null) {
return manager;
}
// manager is null: this is a fragment restored by the FragmentManager;
// fall through and create a new BackgroundManager to attach to it.
}
return new BackgroundManager(activity);
}
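/*
 * Illustrative usage sketch (assumes a standard leanback Activity; the
 * "activity" variable is hypothetical): a caller typically obtains the manager
 * once and attaches it to the window before setting a background.
 *
 *   BackgroundManager manager = BackgroundManager.getInstance(activity);
 *   manager.attach(activity.getWindow());
 *   manager.setColor(Color.DKGRAY);
 */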
/**
* Construct a BackgroundManager instance. The initial background is set
* from the continuity service.
* @deprecated Use getInstance(Activity).
*/
@Deprecated
public BackgroundManager(Activity activity) {
mContext = activity;
mService = BackgroundContinuityService.getInstance();
mHeightPx = mContext.getResources().getDisplayMetrics().heightPixels;
mWidthPx = mContext.getResources().getDisplayMetrics().widthPixels;
mHandler = new Handler();
TypedArray ta = activity.getTheme().obtainStyledAttributes(new int[] {
android.R.attr.windowBackground });
mThemeDrawableResourceId = ta.getResourceId(0, -1);
if (mThemeDrawableResourceId < 0) {
if (DEBUG) Log.v(TAG, "BackgroundManager no window background resource!");
}
ta.recycle();
createFragment(activity);
}
private void createFragment(Activity activity) {
// Use a fragment to ensure the background manager gets detached properly.
BackgroundFragment fragment = (BackgroundFragment) activity.getFragmentManager()
.findFragmentByTag(FRAGMENT_TAG);
if (fragment == null) {
fragment = new BackgroundFragment();
activity.getFragmentManager().beginTransaction().add(fragment, FRAGMENT_TAG).commit();
} else {
if (fragment.getBackgroundManager() != null) {
throw new IllegalStateException("Created duplicated BackgroundManager for same " +
"activity, please use getInstance() instead");
}
}
fragment.setBackgroundManager(this);
}
/**
* Synchronizes state when the owning Activity is resumed.
*/
void onActivityResume() {
if (mService == null) {
return;
}
if (mLayerDrawable == null) {
if (DEBUG) Log.v(TAG, "onActivityResume " + this +
" released state, syncing with service");
syncWithService();
} else {
if (DEBUG) Log.v(TAG, "onActivityResume " + this + " updating service color "
+ mBackgroundColor + " drawable " + mBackgroundDrawable);
mService.setColor(mBackgroundColor);
mService.setDrawable(mBackgroundDrawable);
}
}
private void syncWithService() {
int color = mService.getColor();
Drawable drawable = mService.getDrawable();
if (DEBUG) Log.v(TAG, "syncWithService color " + Integer.toHexString(color)
+ " drawable " + drawable);
mBackgroundColor = color;
mBackgroundDrawable = drawable == null ? null :
drawable.getConstantState().newDrawable().mutate();
updateImmediate();
}
private void lazyInit() {
if (mLayerDrawable != null) {
return;
}
mLayerDrawable = (LayerDrawable) mContext.getResources().getDrawable(
R.drawable.lb_background).mutate();
mBgView.setBackground(mLayerDrawable);
mLayerDrawable.setDrawableByLayerId(R.id.background_imageout, createEmptyDrawable());
mDimWrapper = new DrawableWrapper(
mLayerDrawable.findDrawableByLayerId(R.id.background_dim));
mLayerWrapper = new DrawableWrapper(mLayerDrawable);
mColorWrapper = new DrawableWrapper(
mLayerDrawable.findDrawableByLayerId(R.id.background_color));
}
/**
* Make the background visible on the given Window.
*/
public void attach(Window window) {
if (USE_SEPARATE_WINDOW) {
attachBehindWindow(window);
} else {
attachToView(window.getDecorView());
}
}
private void attachBehindWindow(Window window) {
if (DEBUG) Log.v(TAG, "attachBehindWindow " + window);
mWindow = window;
mWindowManager = window.getWindowManager();
WindowManager.LayoutParams params = new WindowManager.LayoutParams(
// Media window sits behind the main application window
WindowManager.LayoutParams.TYPE_APPLICATION_MEDIA,
// Avoid default to software format RGBA
WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED,
android.graphics.PixelFormat.TRANSLUCENT);
params.setTitle(WINDOW_NAME);
params.width = ViewGroup.LayoutParams.MATCH_PARENT;
params.height = ViewGroup.LayoutParams.MATCH_PARENT;
View backgroundView = LayoutInflater.from(mContext).inflate(
R.layout.lb_background_window, null);
mWindowManager.addView(backgroundView, params);
attachToView(backgroundView);
}
private void attachToView(View sceneRoot) {
mBgView = sceneRoot;
mAttached = true;
syncWithService();
}
/**
* Release references to Drawables and put the BackgroundManager into the
* detached state. Called when the associated Activity is destroyed.
* @hide
*/
void detach() {
if (DEBUG) Log.v(TAG, "detach " + this);
release();
if (mWindowManager != null && mBgView != null) {
mWindowManager.removeViewImmediate(mBgView);
}
mWindowManager = null;
mWindow = null;
mBgView = null;
mAttached = false;
if (mService != null) {
mService.unref();
mService = null;
}
}
/**
* Release references to Drawables. Typically called to reduce memory
* overhead when not visible.
* <p>
* When an Activity is resumed, if the BackgroundManager has not been
* released, the continuity service is updated from the BackgroundManager
* state. If the BackgroundManager was released, the BackgroundManager
* inherits the current state from the continuity service.
*/
public void release() {
if (DEBUG) Log.v(TAG, "release " + this);
if (mLayerDrawable != null) {
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, createEmptyDrawable());
mLayerDrawable.setDrawableByLayerId(R.id.background_imageout, createEmptyDrawable());
mLayerDrawable = null;
}
mLayerWrapper = null;
mImageInWrapper = null;
mImageOutWrapper = null;
mColorWrapper = null;
mDimWrapper = null;
mThemeDrawable = null;
if (mChangeRunnable != null) {
mChangeRunnable.cancel();
mChangeRunnable = null;
}
releaseBackgroundBitmap();
}
private void releaseBackgroundBitmap() {
mBackgroundDrawable = null;
}
private void updateImmediate() {
lazyInit();
mColorWrapper.setColor(mBackgroundColor);
if (mDimWrapper != null) {
mDimWrapper.setAlpha(mBackgroundColor == Color.TRANSPARENT ? 0 : DIM_ALPHA_ON_SOLID);
}
showWallpaper(mBackgroundColor == Color.TRANSPARENT);
mThemeDrawable = getThemeDrawable();
mLayerDrawable.setDrawableByLayerId(R.id.background_theme, mThemeDrawable);
if (mBackgroundDrawable == null) {
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, createEmptyDrawable());
} else {
if (DEBUG) Log.v(TAG, "Background drawable is available");
mImageInWrapper = new DrawableWrapper(mBackgroundDrawable);
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, mBackgroundDrawable);
if (mDimWrapper != null) {
mDimWrapper.setAlpha(FULL_ALPHA);
}
}
}
/**
* Set the background to the given color. The timing for when this becomes
* visible in the app is undefined and may take place after a small delay.
*/
public void setColor(int color) {
if (DEBUG) Log.v(TAG, "setColor " + Integer.toHexString(color));
mBackgroundColor = color;
mService.setColor(mBackgroundColor);
if (mColorWrapper != null) {
mColorWrapper.setColor(mBackgroundColor);
}
}
/**
* Set the given drawable into the background. The provided Drawable will be
* used unmodified as the background, without any scaling or cropping
* applied to it. The timing for when this becomes visible in the app is
* undefined and may take place after a small delay.
*/
public void setDrawable(Drawable drawable) {
if (DEBUG) Log.v(TAG, "setBackgroundDrawable " + drawable);
setDrawableInternal(drawable);
}
private void setDrawableInternal(Drawable drawable) {
if (!mAttached) {
throw new IllegalStateException("Must attach before setting background drawable");
}
if (mChangeRunnable != null) {
mChangeRunnable.cancel();
}
mChangeRunnable = new ChangeBackgroundRunnable(drawable);
if (mImageInWrapper != null && mImageInWrapper.isAnimationStarted()) {
if (DEBUG) Log.v(TAG, "animation in progress");
} else {
mHandler.postDelayed(mChangeRunnable, CHANGE_BG_DELAY_MS);
}
}
/**
* Set the given bitmap into the background. When using setBitmap to set the
* background, the provided bitmap will be scaled and cropped to correctly
* fit within the dimensions of the view. The timing for when this becomes
* visible in the app is undefined and may take place after a small delay.
*/
public void setBitmap(Bitmap bitmap) {
if (DEBUG) {
Log.v(TAG, "setBitmap " + bitmap);
}
if (bitmap == null) {
setDrawableInternal(null);
return;
}
if (bitmap.getWidth() <= 0 || bitmap.getHeight() <= 0) {
if (DEBUG) {
Log.v(TAG, "invalid bitmap width or height");
}
return;
}
Matrix matrix = null;
if ((bitmap.getWidth() != mWidthPx || bitmap.getHeight() != mHeightPx)) {
int dwidth = bitmap.getWidth();
int dheight = bitmap.getHeight();
float scale;
// Scale proportionately to fit width and height.
if (dwidth * mHeightPx > mWidthPx * dheight) {
scale = (float) mHeightPx / (float) dheight;
} else {
scale = (float) mWidthPx / (float) dwidth;
}
int subX = Math.min((int) (mWidthPx / scale), dwidth);
int dx = Math.max(0, (dwidth - subX) / 2);
matrix = new Matrix();
matrix.setScale(scale, scale);
matrix.preTranslate(-dx, 0);
if (DEBUG) Log.v(TAG, "original image size " + bitmap.getWidth() + "x" + bitmap.getHeight() +
" scale " + scale + " dx " + dx);
}
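// Worked example of the center-crop math above, with illustrative numbers:
// for a 1920x1080 view and a 3000x1000 bitmap, dwidth * mHeightPx (3,240,000)
// exceeds mWidthPx * dheight (1,920,000), so scale = 1080 / 1000 = 1.08,
// subX = min(1920 / 1.08, 3000) = 1777 and dx = (3000 - 1777) / 2 = 611;
// the bitmap is scaled to fill the height and shifted left 611px to center the crop.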
BitmapDrawable bitmapDrawable = new BitmapDrawable(mContext.getResources(), bitmap, matrix);
setDrawableInternal(bitmapDrawable);
}
private void applyBackgroundChanges() {
if (!mAttached || mLayerWrapper == null) {
return;
}
if (DEBUG) Log.v(TAG, "applyBackgroundChanges drawable " + mBackgroundDrawable);
int dimAlpha = 0;
if (mImageOutWrapper != null && mImageOutWrapper.isAnimationPending()) {
if (DEBUG) Log.v(TAG, "mImageOutWrapper animation starting");
mImageOutWrapper.startAnimation();
mImageOutWrapper = null;
dimAlpha = DIM_ALPHA_ON_SOLID;
}
if (mImageInWrapper == null && mBackgroundDrawable != null) {
if (DEBUG) Log.v(TAG, "creating new imagein drawable");
mImageInWrapper = new DrawableWrapper(mBackgroundDrawable);
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, mBackgroundDrawable);
if (DEBUG) Log.v(TAG, "mImageInWrapper animation starting");
mImageInWrapper.setAlpha(0);
mImageInWrapper.fadeIn(FADE_DURATION, 0);
mImageInWrapper.startAnimation(mImageInListener);
dimAlpha = FULL_ALPHA;
}
if (mDimWrapper != null && dimAlpha != 0) {
if (DEBUG) Log.v(TAG, "dimwrapper animation starting to " + dimAlpha);
mDimWrapper.fade(FADE_DURATION, 0, dimAlpha);
mDimWrapper.startAnimation();
}
}
private final Animator.AnimatorListener mImageInListener = new Animator.AnimatorListener() {
@Override
public void onAnimationStart(Animator animation) {
}
@Override
public void onAnimationRepeat(Animator animation) {
}
@Override
public void onAnimationEnd(Animator animation) {
if (mChangeRunnable != null) {
if (DEBUG) Log.v(TAG, "animation ended, found change runnable");
mChangeRunnable.run();
}
}
@Override
public void onAnimationCancel(Animator animation) {
}
};
/**
* Returns the current background color.
*/
public final int getColor() {
return mBackgroundColor;
}
/**
* Returns the current background {@link Drawable}.
*/
public Drawable getDrawable() {
return mBackgroundDrawable;
}
private boolean sameDrawable(Drawable first, Drawable second) {
if (first == null || second == null) {
return false;
}
if (first == second) {
return true;
}
if (first instanceof BitmapDrawable && second instanceof BitmapDrawable) {
if (((BitmapDrawable) first).getBitmap().sameAs(((BitmapDrawable) second).getBitmap())) {
return true;
}
}
return false;
}
/**
* Task which changes the background.
*/
class ChangeBackgroundRunnable implements Runnable {
private Drawable mDrawable;
private boolean mCancel;
ChangeBackgroundRunnable(Drawable drawable) {
mDrawable = drawable;
}
public void cancel() {
mCancel = true;
}
@Override
public void run() {
if (!mCancel) {
runTask();
}
}
private void runTask() {
lazyInit();
if (sameDrawable(mDrawable, mBackgroundDrawable)) {
if (DEBUG) Log.v(TAG, "same bitmap detected");
return;
}
releaseBackgroundBitmap();
if (mImageInWrapper != null) {
mImageOutWrapper = new DrawableWrapper(mImageInWrapper.getDrawable());
mImageOutWrapper.setAlpha(mImageInWrapper.getAlpha());
mImageOutWrapper.fadeOut(FADE_DURATION);
// Order is important! Setting a drawable "removes" the
// previous one from the view
mLayerDrawable.setDrawableByLayerId(R.id.background_imagein, createEmptyDrawable());
mLayerDrawable.setDrawableByLayerId(R.id.background_imageout,
mImageOutWrapper.getDrawable());
mImageInWrapper.setAlpha(0);
mImageInWrapper = null;
}
mBackgroundDrawable = mDrawable;
mService.setDrawable(mBackgroundDrawable);
applyBackgroundChanges();
mChangeRunnable = null;
}
}
private Drawable createEmptyDrawable() {
Bitmap bitmap = null;
return new BitmapDrawable(mContext.getResources(), bitmap);
}
private void showWallpaper(boolean show) {
if (mWindow == null) {
return;
}
WindowManager.LayoutParams layoutParams = mWindow.getAttributes();
if (show) {
if ((layoutParams.flags & WindowManager.LayoutParams.FLAG_SHOW_WALLPAPER) != 0) {
return;
}
if (DEBUG) Log.v(TAG, "showing wallpaper");
layoutParams.flags |= WindowManager.LayoutParams.FLAG_SHOW_WALLPAPER;
} else {
if ((layoutParams.flags & WindowManager.LayoutParams.FLAG_SHOW_WALLPAPER) == 0) {
return;
}
if (DEBUG) Log.v(TAG, "hiding wallpaper");
layoutParams.flags &= ~WindowManager.LayoutParams.FLAG_SHOW_WALLPAPER;
}
mWindow.setAttributes(layoutParams);
}
}
| am 71ace352: Merge "Use ValueAnimator instead of ObjectAnimator." into lmp-preview-dev
* commit '71ace352963715f82ed0ecede3f55f3808af30b0':
Use ValueAnimator instead of ObjectAnimator.
| v17/leanback/src/android/support/v17/leanback/app/BackgroundManager.java | am 71ace352: Merge "Use ValueAnimator instead of ObjectAnimator." into lmp-preview-dev |
|
Java | apache-2.0 | 066be35e628e11aad4f90a6ba959f08e27e38463 | 0 | iamfigo/redis-cluster-manager,iamfigo/redis-cluster-manager | package com.huit.util;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.mongodb.MongoClient;
import com.mongodb.WriteConcern;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;
import redis.clients.jedis.*;
import redis.clients.jedis.exceptions.JedisConnectionException;
import redis.clients.util.JedisClusterCRC16;
import java.io.*;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
/**
* Redis cluster management tool
*
* @author huit
*/
public class RedisClusterManager {
private static String REDIS_HOST = SystemConf.get("REDIS_HOST");
private static int REDIS_PORT = Integer.parseInt(SystemConf.get("REDIS_PORT"));
private static JedisCluster cluster;
static final int DEFAULT_TIMEOUT = 2000;
static final int MAX_REDIRECTIONS = 25;//should be greater than or equal to the number of master nodes
static ScanParams sp = new ScanParams();
static {
sp.count(10000);
}
private static void connectCluser() {
Set<HostAndPort> nodes = new HashSet<HostAndPort>();
nodes.add(new HostAndPort(REDIS_HOST, REDIS_PORT));
JedisPoolConfig poolConfig = new JedisPoolConfig();
poolConfig.setMaxTotal(1000);
poolConfig.setMaxIdle(10);
poolConfig.setMinIdle(1);
poolConfig.setMaxWaitMillis(30000);
poolConfig.setTestOnBorrow(true);
poolConfig.setTestOnReturn(true);
poolConfig.setTestWhileIdle(true);
cluster = new JedisCluster(nodes, DEFAULT_TIMEOUT, MAX_REDIRECTIONS, poolConfig);
}
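/*
 * Illustrative sketch: JedisCluster routes each command by hashing the key into
 * one of 16384 slots (CRC16 mod 16384) and sending it to the node that owns the
 * slot; MAX_REDIRECTIONS bounds how many MOVED/ASK redirects are followed while
 * slots migrate. The slot for a key can be computed with the helper imported
 * above, e.g.:
 *
 *   int slot = JedisClusterCRC16.getSlot("u_f_12345"); // value in [0, 16383]
 */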
public RedisClusterManager() {
REDIS_HOST = SystemConf.get("REDIS_HOST");
REDIS_PORT = Integer.valueOf(SystemConf.get("REDIS_PORT"));
}
private static AtomicLong writeCount = new AtomicLong();
private static AtomicLong lastWriteCount = new AtomicLong();
private static AtomicLong scanCount = new AtomicLong();
private static AtomicLong delCount = new AtomicLong();
private static AtomicLong checkCount = new AtomicLong();
private static AtomicLong errorCount = new AtomicLong();
private static AtomicLong lastReadCount = new AtomicLong();
private static long writeBeginTime = System.currentTimeMillis(), readLastCountTime, writeLastCountTime;
private static final DecimalFormat speedFormat = new DecimalFormat("#,##0.00");//格式化设置
private static boolean isCompleted = false;
/**
* Delete praise (like) entries listed in a file from the given zset key
*
* @throws Exception
*/
public void praiseDel(final String delKey, final String filePath) throws Exception {
BufferedReader br = new BufferedReader(new FileReader(filePath));
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath + ".deleted"));
String data;
long delCount = 0, readCount = 0;
while ((data = br.readLine()) != null) {
readCount++;
Double score = cluster.zscore(delKey, data.trim());
if (null != score) {
long result = cluster.zrem(delKey, data.trim());
if (1 == result) {
delCount++;
bw.write(data.trim() + "->" + score);
bw.write("\r\n");
}
}
}
br.close();
bw.close();
System.out.println("readCount:" + readCount + " delCount:" + delCount);
}
/**
* Do not actually delete; only count the praise (like) entries that would be removed
*
* @throws Exception
*/
public void praiseCountDel(final String delKey, final String filePath) throws Exception {
BufferedReader br = new BufferedReader(new FileReader(filePath));
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath + ".deleted"));
String data;
long markedDelCount = 0, readCount = 0;
while ((data = br.readLine()) != null) {
readCount++;
String value = data.trim();
Double score = cluster.zscore(delKey, value);
if (null != score) {
markedDelCount++;
bw.write(value + "->" + score);
bw.write("\r\n");
}
}
br.close();
bw.close();
System.out.println("checkKey" + delKey + " readCount:" + readCount + " markedCount:" + markedDelCount);
}
/**
* Scan the given zset, collect per-hour statistics of its members, and flag uids by follower/attention/praise counts
*
*/
public void praiseCount(final String importKey, final String filePath) {
final List<String> dataQueue = Collections.synchronizedList(new LinkedList<String>());// queue of data pending processing
final Thread[] writeThread = new Thread[cluster.getClusterNodes().size() * 3];//three times the number of cluster nodes
final Map<String, AtomicLong> statisticsMap = new TreeMap<String, AtomicLong>();
Thread readThread = new Thread(new Runnable() {
@Override
public void run() {
try {
String cursor = "0";
Date date = new Date();
java.text.DateFormat format1 = new java.text.SimpleDateFormat("yyyy-MM-dd-HH");
do {
ScanResult<Tuple> sscanResult = cluster.zscan(importKey, cursor, sp);
cursor = sscanResult.getStringCursor();
List<Tuple> result = sscanResult.getResult();
double time;
for (Tuple tuple : result) {
dataQueue.add(tuple.getElement());
time = tuple.getScore();
date.setTime((long) (time * 1000));
String key = format1.format(date);
AtomicLong count = statisticsMap.get(key);
if (null == count) {
count = new AtomicLong();
statisticsMap.put(key, count);
}
count.incrementAndGet();
}
long count = scanCount.addAndGet(result.size());
if (count % 50000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("read count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
synchronized (dataQueue) {
Collections.shuffle(dataQueue);//exports are grouped per node; shuffling spreads the load and improves performance
}
while (dataQueue.size() > 100000) {//cap the queue to avoid exhausting memory
Thread.sleep(1000);
}
}
} while (!"0".equals(cursor));
synchronized (dataQueue) {
Collections.shuffle(dataQueue);
}
isCompleted = true;
while (!dataQueue.isEmpty()) {//wait for all writes to complete
Thread.sleep(500);
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = scanCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("write total:" + totalCount + " speed:" + speedFormat.format(speed)
+ " useTime:" + (useTime / 1000.0) + "s");
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i].interrupt();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
readThread.start();
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i] = new Thread(new Runnable() {
@Override
public void run() {
while (!isCompleted || !dataQueue.isEmpty()) {
String uid = null;
if (dataQueue.isEmpty()) {
try {
Thread.sleep(100);
continue;
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
try {
synchronized (dataQueue) {
uid = dataQueue.remove(0);
}
} catch (IndexOutOfBoundsException e) {
continue;
}
}
long uf = cluster.zcard("u_f_" + uid);
long ua = cluster.zcard("u_a_" + uid);
long up = cluster.zcard("u_p_" + uid);
String info = "uid:" + uid + " uf:" + uf + " ua:" + ua + " up:" + up;
if (uf == 0 && ua <= 1 && up == 2) {
long count = writeCount.incrementAndGet();
System.out.println("marked->" + info);
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + scanCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
}
}, "write thread [" + i + "]");
writeThread[i].setDaemon(true);
writeThread[i].start();
}
for (Thread thread : writeThread) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
System.out.println("statisticsMap->begin");
Iterator<Entry<String, AtomicLong>> it = statisticsMap.entrySet().iterator();
while (it.hasNext()) {
Entry<String, AtomicLong> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("statisticsMap->end");
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scanCount:" + scanCount.get() + " markedCount:" + totalCount + " errorCount:"
+ errorCount.get() + " speed:" + speedFormat.format(speed) + " useTime:" + (useTime / 1000.0) + "s");
}
/**
* Clean up garbage data produced by a data restore
*/
public void followAttentionDel(String importKey, final String filePath) throws IOException {
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath + ".deleted"));
final String[] keys = importKey.split(",");
long delCount = 0;
for (String key : keys) {
if (key.startsWith("u_a_")) {
String zcursor = "0";
do {
ScanResult<Tuple> sscanResult = cluster.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
String followUid = data.getElement();
String uid = key.substring("u_a_".length());
String keyDel = "u_a_" + followUid;
long result = cluster.zrem(keyDel, uid);//remove the followed user's follow-back of this user
delCount++;
bw.write("result:" + result + " " + keyDel + "->" + data.getScore());
bw.write("\r\n");
}
} while (!"0".equals(zcursor));
}
}
bw.close();
System.out.println("followDel->delCount:" + delCount);
}
/**
* Delete a user's fans (followers)
*/
public void followDel(String importKey, final String filePath) throws IOException {
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath + ".deleted"));
final String[] keys = importKey.split(",");
long delCount = 0;
for (String key : keys) {
if (key.startsWith("u_f_")) {
String zcursor = "0";
do {
ScanResult<Tuple> sscanResult = cluster.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
String followUid = data.getElement();
String uid = key.substring("u_f_".length());
cluster.zrem("u_a_" + followUid, uid);//从粉丝队列移除对自己的关注
delCount++;
bw.write(followUid + "->" + data.getScore());
bw.write("\r\n");
}
} while (!"0".equals(zcursor));
}
}
bw.close();
System.out.println("followDel->delCount:" + delCount);
}
/**
* Restore a user's fans (followers) from an export file
*/
public void followRestore(String importKey, final String filePath) {
final String[] importKeyPre = importKey.split(",");
final List<JSONObject> dataQueue = Collections.synchronizedList(new LinkedList<JSONObject>());// queue of data pending processing
final Thread[] writeThread = new Thread[cluster.getClusterNodes().size() * 3];//three times the number of cluster nodes
Thread readThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader br = new BufferedReader(new FileReader(filePath));
String data = null;
while ((data = br.readLine()) != null) {
JSONObject json = JSONObject.parseObject(data);
dataQueue.add(json);
long count = scanCount.incrementAndGet();
if (count % 50000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("read count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
synchronized (dataQueue) {
Collections.shuffle(dataQueue);//exports are grouped per node; shuffling spreads the load and improves performance
}
while (dataQueue.size() > 100000) {//cap the queue to avoid exhausting memory
Thread.sleep(1000);
}
}
}
br.close();
synchronized (dataQueue) {
Collections.shuffle(dataQueue);
}
isCompleted = true;
while (!dataQueue.isEmpty()) {//wait for all writes to complete
Thread.sleep(500);
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = scanCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("write total:" + totalCount + " speed:" + speedFormat.format(speed)
+ " useTime:" + (useTime / 1000.0) + "s");
} catch (Exception e) {
e.printStackTrace();
}
}
});
readThread.start();
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i] = new Thread(new Runnable() {
@Override
public void run() {
while (!isCompleted || !dataQueue.isEmpty()) {
JSONObject json = null;
if (dataQueue.isEmpty()) {
try {
Thread.sleep(100);
continue;
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
try {
synchronized (dataQueue) {
json = dataQueue.remove(0);
}
} catch (IndexOutOfBoundsException e) {
continue;
}
}
String key = json.getString("key");
String type = json.getString("type");
Object oject = json.get("value");
boolean isNeedImport = false;
for (String keyImport : importKeyPre) {
if ("*".equals(keyImport) || key.startsWith(keyImport)) {
isNeedImport = true;
break;
}
}
//lists are merged with existing values
if (isNeedImport) {
if ("hash".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
Map<String, String> hash = new HashMap<String, String>();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
String dataKey = jsonData.getString("key");
String dataValue = jsonData.getString("value");
hash.put(dataKey, dataValue);
}
cluster.hmset(key, hash);
} else if ("string".equals(type)) {
String dataValue = (String) oject;
cluster.set(key, dataValue);
} else if ("list".equals(type)) {
JSONArray value = (JSONArray) oject;
List<String> inDb = cluster.lrange(key, 0, -1);
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
if (!inDb.contains(dataValue)) {//merge: only push values not already in the list
cluster.rpush(key, dataValue);
} else {
// System.out.println("value:" + value);
}
}
} else if ("set".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
cluster.sadd(key, dataValue);
}
} else if ("zset".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
double score = jsonData.getLong("score");
String dataValue = jsonData.getString("value");
if (key.startsWith("u_f_")) {//粉丝队列
cluster.zadd(key, score, dataValue);//加粉丝
String uid = key.substring("u_f_".length());
cluster.zadd("u_a_" + dataValue, score, uid);//加关注
} else if (key.startsWith("u_a_")) {//关注队列
cluster.zadd(key, score, dataValue);//加关注
String uid = key.substring("u_a_".length());
cluster.zadd("u_f_" + dataValue, score, uid);//加粉丝
}
}
} else {
System.out.println("unknow keyType:" + type + "key:" + key);
}
long count = writeCount.incrementAndGet();
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + scanCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
}
}, "write thread [" + i + "]");
writeThread[i].setDaemon(true);
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
writeThread[i].start();
}
}
/**
* Import data from an export file, filtered by key prefix
*
*/
public void importKey(String importKey, final String filePath) {
final String[] importKeyPre = importKey.split(",");
final List<JSONObject> dataQueue = Collections.synchronizedList(new LinkedList<JSONObject>());// queue of data pending processing
final Thread[] writeThread = new Thread[cluster.getClusterNodes().size() * 3];//three times the number of cluster nodes
Thread readThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader br = new BufferedReader(new FileReader(filePath));
String data = null;
while ((data = br.readLine()) != null) {
JSONObject json = JSONObject.parseObject(data);
dataQueue.add(json);
long count = scanCount.incrementAndGet();
if (count % 50000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("read count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
synchronized (dataQueue) {
Collections.shuffle(dataQueue);//exports are grouped per node; shuffling spreads the load and improves performance
}
while (dataQueue.size() > 100000) {//cap the queue to avoid exhausting memory
Thread.sleep(1000);
}
}
}
br.close();
synchronized (dataQueue) {
Collections.shuffle(dataQueue);
}
isCompleted = true;
while (!dataQueue.isEmpty()) {//wait for all writes to complete
Thread.sleep(500);
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = scanCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("write total:" + totalCount + " speed:" + speedFormat.format(speed)
+ " useTime:" + (useTime / 1000.0) + "s");
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i].interrupt();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
readThread.start();
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i] = new Thread(new Runnable() {
@Override
public void run() {
while (!isCompleted || !dataQueue.isEmpty()) {
JSONObject json = null;
if (dataQueue.isEmpty()) {
try {
Thread.sleep(100);
continue;
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
try {
synchronized (dataQueue) {
json = dataQueue.remove(0);
}
} catch (IndexOutOfBoundsException e) {
continue;
}
}
String key = json.getString("key");
String type = json.getString("type");
Object oject = json.get("value");
boolean isNeedImport = false;
for (String keyImport : importKeyPre) {
if ("*".equals(keyImport) || key.startsWith(keyImport)) {
isNeedImport = true;
break;
}
}
//lists are merged with existing values
if (isNeedImport) {
if ("hash".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
Map<String, String> hash = new HashMap<String, String>();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
String dataKey = jsonData.getString("key");
String dataValue = jsonData.getString("value");
hash.put(dataKey, dataValue);
}
cluster.hmset(key, hash);
} else if ("string".equals(type)) {
String dataValue = (String) oject;
cluster.set(key, dataValue);
} else if ("list".equals(type)) {
JSONArray value = (JSONArray) oject;
List<String> inDb = cluster.lrange(key, 0, -1);
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
if (!inDb.contains(dataValue)) {//merge: only push values not already in the list
cluster.rpush(key, dataValue);
} else {
// System.out.println("value:" + value);
}
}
} else if ("set".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
cluster.sadd(key, dataValue);
}
} else if ("zset".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
double score = jsonData.getLong("score");
String dataValue = jsonData.getString("value");
cluster.zadd(key, score, dataValue);
}
} else {
System.out.println("unknow keyType:" + type + "key:" + key);
}
long count = writeCount.incrementAndGet();
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + scanCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
}
}, "write thread [" + i + "]");
writeThread[i].setDaemon(true);
writeThread[i].start();
}
}
private void importMongodb(String KeyPre, String filePath) {
MongoClient mongo = new MongoClient("mycentos-01", 27017);
MongoDatabase db0 = mongo.getDatabase("db0");
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(filePath));
String data = null;
String[] importKeyPre = KeyPre.split(",");
while ((data = br.readLine()) != null) {
JSONObject json = JSONObject.parseObject(data);
String key = json.getString("key");
String type = json.getString("type");
Object oject = json.get("value");
boolean isNeedImport = false;
for (String keyImport : importKeyPre) {
if ("*".equals(keyImport) || key.startsWith(keyImport)) {
isNeedImport = true;
break;
}
}
//lists are merged with existing values
if (isNeedImport) {
int index = key.lastIndexOf("_");
String collectionName = null;
if (index > 0) {
collectionName = key.substring(0, index);
} else {
collectionName = "default";
}
if ("hash".equals(type)) {
try {
//db0.createCollection(collectionName);
} catch (Exception e) {
e.printStackTrace();
}
MongoCollection<Document> coll = db0.getCollection(collectionName);
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
Map<String, String> hash = new HashMap<String, String>();
Document info = new Document();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
String dataKey = jsonData.getString("key");
String dataValue = jsonData.getString("value");
hash.put(dataKey, dataValue);
info.put(dataKey, dataValue);
}
coll.insertOne(info);
WriteConcern concern = coll.getWriteConcern();
concern.isAcknowledged();
//cluster.hmset(key, hash);
} else if ("string".equals(type)) {
String dataValue = (String) oject;
//cluster.set(key, dataValue);
} else if ("list".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
//cluster.rpush(key, dataValue);
}
} else if ("set".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
//cluster.sadd(key, dataValue);
}
} else if ("zset".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
double score = jsonData.getLong("score");
String dataValue = jsonData.getString("value");
//cluster.zadd(key, score, dataValue);
}
} else {
System.out.println("unknow keyType:" + type + "key:" + key);
}
long count = writeCount.incrementAndGet();
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + scanCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
br.close();
} catch (IOException e) {
e.printStackTrace();
}
}
mongo.close();
}
/**
* Export data by key prefix
*/
public void exportKeyPre(String keyPre, final String filePath) {
final String[] exportKeyPre = keyPre.split(",");
createExportFile(filePath + ".0");
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//some nodes may have failed and be unreachable
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只导出master
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
boolean isExport = false;
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
isExport = true;
break;
}
}
long count = scanCount.incrementAndGet();
if (count % 1000000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("scan count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
if (!isExport) {
continue;
}
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = nodeCli.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = nodeCli.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", nodeCli.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 100;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = nodeCli.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = nodeCli.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
json.put("time", System.currentTimeMillis());
writeFile(json.toJSONString(), "export", filePath);
}
} while (!"0".equals(cursor));
}
}, entry.getKey() + "export thread");
exportTheadList.add(exportThread);
exportThread.start();
}
waitThread(exportTheadList);
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan count:" + scanCount.get() + " export total:" + totalCount + " speed:"
+ speedFormat.format(speed) + " useTime:" + (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
private static BufferedWriter bw = null;
public static synchronized void createExportFile(String filePath) {
String pathDir = filePath.substring(0, filePath.lastIndexOf("/"));
File file = new File(pathDir);
if (!file.isDirectory()) {
file.mkdirs();
}
File f = new File(filePath);
FileOutputStream fos = null;
try {
fos = new FileOutputStream(f);
// write UTF8 BOM mark if file is empty
if (f.length() < 1) {
final byte[] bom = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
fos.write(bom);
}
} catch (IOException ex) {
} finally {
try {
fos.close();
} catch (Exception ex) {
}
}
try {
bw = new BufferedWriter(new FileWriter(filePath, true));
} catch (IOException e) {
e.printStackTrace();
}
}
private static final long FILE_PARTITION_LINE_COUNT = 1000000;//1,000,000 lines per output file
public static synchronized void writeFile(String data, String optType, String filePath) {
try {
if (null == bw) {
createExportFile(filePath + ".0");
}
bw.write(data);
bw.write('\r');
bw.write('\n');
long count = writeCount.incrementAndGet();
if (count % 100000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println(optType + " count:" + count + " speed:" + speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
if (count % FILE_PARTITION_LINE_COUNT == 0) {//roll over to a new file every 1,000,000 lines
createExportFile(filePath + "." + (count / FILE_PARTITION_LINE_COUNT));
}
} catch (IOException e) {
e.printStackTrace();
}
}
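/*
 * Illustrative example of the partitioning above: with FILE_PARTITION_LINE_COUNT
 * = 1,000,000, writing 2,500,000 records produces filePath.0 and filePath.1 with
 * one million lines each and filePath.2 with the remaining 500,000, because a new
 * writer is opened each time the running count crosses a multiple of a million.
 */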
public void exportKeysFile(String keyFilePath, String filePath) {
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(keyFilePath));
String data = null;
while ((data = br.readLine()) != null) {
exportKeys(data, filePath);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (null != br) {
br.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
/**
* Bucket uids from the index zset by fan count, then import data filtered by key prefix
*
*/
public void importKey2(final String indexKey, String preKey, final String filePath) {
final String[] importKeyPre = indexKey.split(",");
final List<JSONObject> dataQueue = Collections.synchronizedList(new LinkedList<JSONObject>());// queue of data pending processing
final Thread[] writeThread = new Thread[cluster.getClusterNodes().size() * 3];//three times the number of cluster nodes
Thread readThread = new Thread(new Runnable() {
@Override
public void run() {
String hcursor = "0";
JSONObject json = new JSONObject();
do {
ScanResult<Tuple> hscanResult = cluster.zscan(indexKey, hcursor, sp);
hcursor = hscanResult.getStringCursor();
String fileExt;
for (Tuple entry : hscanResult.getResult()) {
String uidKey = entry.getElement();
long zcard = cluster.zcard("u_f_" + uidKey);
json.put("uid", uidKey);
json.put("zcard", zcard);
if (zcard > 1000) {
fileExt = "1000+";
} else if (zcard > 500 && zcard <= 1000) {
fileExt = "500-1000";
} else if (zcard > 300 && zcard <= 500) {
fileExt = "300-500";
} else if (zcard > 200 && zcard <= 300) {
fileExt = "200-300";
} else if (zcard > 100 && zcard <= 200) {
fileExt = "100-200";
} else if (zcard >= 1 && zcard <= 100) {
fileExt = "1-100";
} else {
fileExt = "0";
}
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath + fileExt, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
long count = scanCount.incrementAndGet();
if (count % 10000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println(" count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
}
} while (!"0".equals(hcursor));
try {
BufferedReader br = new BufferedReader(new FileReader(filePath));
String data = null;
while ((data = br.readLine()) != null) {
dataQueue.add(json);
long count = scanCount.incrementAndGet();
if (count % 50000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("read count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
synchronized (dataQueue) {
Collections.shuffle(dataQueue);//exports are grouped per node; shuffling spreads the load and improves performance
}
while (dataQueue.size() > 100000) {//cap the queue to avoid exhausting memory
Thread.sleep(1000);
}
}
}
br.close();
synchronized (dataQueue) {
Collections.shuffle(dataQueue);
}
isCompleted = true;
while (!dataQueue.isEmpty()) {//wait for all writes to complete
Thread.sleep(500);
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = scanCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("write total:" + totalCount + " speed:" + speedFormat.format(speed)
+ " useTime:" + (useTime / 1000.0) + "s");
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i].interrupt();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
readThread.start();
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i] = new Thread(new Runnable() {
@Override
public void run() {
while (!isCompleted || !dataQueue.isEmpty()) {
JSONObject json = null;
if (dataQueue.isEmpty()) {
try {
Thread.sleep(100);
continue;
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
try {
synchronized (dataQueue) {
json = dataQueue.remove(0);
}
} catch (IndexOutOfBoundsException e) {
continue;
}
}
String key = json.getString("key");
String type = json.getString("type");
Object oject = json.get("value");
boolean isNeedImport = false;
for (String keyImport : importKeyPre) {
if ("*".equals(keyImport) || key.startsWith(keyImport)) {
isNeedImport = true;
break;
}
}
//lists are merged with existing values
if (isNeedImport) {
if ("hash".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
Map<String, String> hash = new HashMap<String, String>();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
String dataKey = jsonData.getString("key");
String dataValue = jsonData.getString("value");
hash.put(dataKey, dataValue);
}
cluster.hmset(key, hash);
} else if ("string".equals(type)) {
String dataValue = (String) oject;
cluster.set(key, dataValue);
} else if ("list".equals(type)) {
JSONArray value = (JSONArray) oject;
List<String> inDb = cluster.lrange(key, 0, -1);
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
if (!inDb.contains(dataValue)) {//merge: only push values not already in the list
cluster.rpush(key, dataValue);
} else {
// System.out.println("value:" + value);
}
}
} else if ("set".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
cluster.sadd(key, dataValue);
}
} else if ("zset".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
double score = jsonData.getLong("score");
String dataValue = jsonData.getString("value");
cluster.zadd(key, score, dataValue);
}
} else {
System.out.println("unknow keyType:" + type + "key:" + key);
}
long count = writeCount.incrementAndGet();
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + scanCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
}
}, "write thread [" + i + "]");
writeThread[i].setDaemon(true);
writeThread[i].start();
}
}
/**
* Repair data based on follow relationships: if a user follows someone but is missing from that person's fans list, add them; also remove entries where a user follows, is a fan of, or is a friend of themselves
*/
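// Assumed key layout, inferred from the code below (shown here for clarity):
//   u_a_<uid>      zset of users that <uid> follows (attention list)
//   u_f_<uid>      zset of <uid>'s fans (followers)
//   u_friend_<uid> zset of <uid>'s mutual friends
// A consistent follow A -> B means B is a member of u_a_A and A is a member of u_f_B.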
public void uaCheck(final String filePath) {
final String u_a_ = "u_a_";
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//some nodes may have failed and be unreachable
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只导出master
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
long count = scanCount.incrementAndGet();
if (count % 1000000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("scanCount:" + count + " speed:" + speedFormat.format(speed)
+ " checkCount:" + checkCount.get() + " errorCount:" + errorCount.get());
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
String uid;
if (key.startsWith(u_a_)) {
uid = key.substring(4);
try {
Integer.valueOf(uid);
} catch (Exception e) {
continue;
}
} else {
continue;
}
String errorInfo;
Double score;
Date time = new Date();
if (null != (score = cluster.zscore("u_a_" + uid, uid))) {//自己关注自己的需要去掉
time.setTime((long) (score * 1000));
errorInfo = uid + "-u_a_>" + uid + " score:" + time;
cluster.zrem("u_a_" + uid, uid);
writeFile(errorInfo, "export", filePath);
}
if (null != (score = cluster.zscore("u_f_" + uid, uid))) {//自己是自己的粉丝需要去掉
time.setTime((long) (score * 1000));
errorInfo = uid + "-u_f_>" + uid + " score:" + time;
cluster.zrem("u_f_" + uid, uid);
writeFile(errorInfo, "export", filePath);
}
if (null != (score = cluster.zscore("u_friend_" + uid, uid))) {//去掉好友关系
time.setTime((long) (score * 1000));
errorInfo = uid + "-u_friend_>" + uid + " score:" + time;
cluster.zrem("u_friend_" + uid, uid);
writeFile(errorInfo, "export", filePath);
}
String zcursor = "0";
String u_a_id;
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
u_a_id = data.getElement();
score = data.getScore();
checkCount.incrementAndGet();
if ("99521678".endsWith(u_a_id) || "88011458".equals(u_a_id)) {
continue;//种草君,假leo不管
}
if (null == cluster.zscore("u_f_" + u_a_id, uid)) {//关注了粉丝列表没有
cluster.zadd("u_f_" + u_a_id, score, uid);//向粉丝列表添加来修复数据
errorCount.incrementAndGet();
errorInfo = uid + "->" + u_a_id;
System.out.println(errorInfo);
writeFile(errorInfo, "export", filePath);
}
}
} while (!"0".equals(zcursor));
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "export thread");
exportTheadList.add(exportThread);
exportThread.start();
}
waitThread(exportTheadList);
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan count:" + scanCount.get() + " export total:" + totalCount + " speed:"
+ speedFormat.format(speed) + " checkCount:" + checkCount.get() + " errorCount:" + errorCount.get()
+ " useTime:" + (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Repair data based on the fans list: if a listed fan does not actually follow the user, remove that entry from the fans list
*/
public void ufCheck(final String filePath) {
final String u_f_ = "u_f_";
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//some nodes may have failed and be unreachable
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只导出master
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
long count = scanCount.incrementAndGet();
if (count % 1000000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("scanCount:" + count + " speed:" + speedFormat.format(speed)
+ " checkCount:" + checkCount.get() + " errorCount:" + errorCount.get());
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
String uid;
if (key.startsWith(u_f_)) {
uid = key.substring(4);
if ("99521678".equals(uid)) {//种草君的不管
continue;
}
try {
Integer.valueOf(uid);
} catch (Exception e) {
continue;
}
} else {
continue;
}
String zcursor = "0";
String u_f_id;
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
u_f_id = data.getElement();
checkCount.incrementAndGet();
if (null == cluster.zscore("u_a_" + u_f_id, uid)) {//粉丝表里有,关注列表里没有,需要删除
cluster.zrem(key, u_f_id);//删除粉丝列表的数据来修复
errorCount.incrementAndGet();
String errorInfo = uid + "->" + u_f_id;
System.out.println(errorInfo);
writeFile(errorInfo, "export", filePath);
}
}
} while (!"0".equals(zcursor));
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "export thread");
exportTheadList.add(exportThread);
exportThread.start();
}
waitThread(exportTheadList);
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan count:" + scanCount.get() + " export total:" + totalCount + " speed:"
+ speedFormat.format(speed) + " checkCount:" + checkCount.get() + " errorCount:" + errorCount.get()
+ " useTime:" + (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Shutdown hook thread that closes the export writer on JVM exit
*/
static class CleanWorkThread extends Thread {
@Override
public void run() {
try {
if (null != bw) {
bw.close();
System.out.println("bw closed");
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
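/*
 * Illustrative sketch: CleanWorkThread is intended to run as a JVM shutdown hook
 * so the export writer is flushed and closed on exit. The registration call site
 * is assumed and not shown in this excerpt:
 *
 *   Runtime.getRuntime().addShutdownHook(new CleanWorkThread());
 */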
/**
* Export per-user fan counts (CSV: uid, fan count, nickname)
*/
public void fansCount(final String filePath) {
final String[] exportKeyPre = "u_f_".split(",");
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//some nodes may have failed and be unreachable
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只导出master
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
boolean isExport = false;
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
isExport = true;
break;
}
}
long count = scanCount.incrementAndGet();
if (count % 1000000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("scan count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
if (!isExport) {
continue;
}
String keyType = nodeCli.type(key);
String uidKey = key.substring(key.lastIndexOf('_') + 1);
StringBuffer sb = new StringBuffer();
if ("zset".equals(keyType)) {
long zcard = cluster.zcard("u_f_" + uidKey);
if (0 == zcard) {//only count users with at least one fan
continue;
}
sb.append("\"").append(uidKey).append("\"").append(',').append(zcard).append(',');
List<String> nickname = cluster.hmget("rpcUserInfo" + uidKey, "nickname");
if (null != nickname && nickname.size() > 0 && null != nickname.get(0)) {
sb.append("\"").append(nickname.get(0).replace(",", "")).append("\"");
} else {
sb.append("\"\"");
}
}
writeFile(sb.toString(), "export", filePath);
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "export thread");
exportTheadList.add(exportThread);
exportThread.start();
}
waitThread(exportTheadList);
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan count:" + scanCount.get() + " export total:" + totalCount + " speed:"
+ speedFormat.format(speed) + " useTime:" + (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Bucket users from the given zset by fan count and export the distribution to per-range files
*/
public void keySizeCount(String key, String filePath) {
filePath += key;
String hcursor = "0";
JSONObject json = new JSONObject();
do {
ScanResult<Tuple> hscanResult = cluster.zscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
String fileExt;
for (Tuple entry : hscanResult.getResult()) {
String uidKey = entry.getElement();
long zcard = cluster.zcard("u_f_" + uidKey);
json.put("uid", uidKey);
json.put("zcard", zcard);
if (zcard > 100000) {
List<String> nickname = cluster.hmget("rpcUserInfo" + uidKey, "nickname");
if (null != nickname && nickname.size() > 0) {
json.put("nickname", nickname.get(0));
}
fileExt = "10W+";
} else if (zcard > 10000 && zcard <= 100000) {
fileExt = "1W-10W";
} else if (zcard > 1000 && zcard <= 10000) {
fileExt = "1k-1W";
} else if (zcard > 500 && zcard <= 1000) {
fileExt = "500-1000";
} else if (zcard > 300 && zcard <= 500) {
fileExt = "300-500";
} else if (zcard > 200 && zcard <= 300) {
fileExt = "200-300";
} else if (zcard > 100 && zcard <= 200) {
fileExt = "100-200";
} else if (zcard >= 1 && zcard <= 100) {
fileExt = "1-100";
} else {
fileExt = "0";
}
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath + fileExt, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
long count = scanCount.incrementAndGet();
if (count % 10000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println(" count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
}
} while (!"0".equals(hcursor));
}
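/*
 * Usage sketch (the "key-size-count" branch in main()):
 *   java -jar redis-cluster-util-jar-with-dependencies.jar key-size-count u_id_set D:/
 * Output files are named <filePath><key><range>, e.g. D:/u_id_set10W+ or D:/u_id_set1-100,
 * with one JSON line {"uid":...,"zcard":...} per member (nickname is added for the 10W+ bucket).
 */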
/**
* Export the given comma-separated keys from the cluster, one JSON object ({"key","type","value"}) per line.
*/
public void exportHostKeys(String keys, String filePath) {
for (String key : keys.split(",")) {
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = cluster.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = cluster.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", cluster.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 1;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = cluster.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = cluster.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = cluster.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
synchronized (this) {//called from the multi-threaded delete, so file writes must be serialized
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
/**
* Export the given comma-separated keys, one JSON object ({"key","type","value"}) per line; keys with an unknown type are logged to <filePath>.error.
*/
public boolean exportKeys(String keys, String filePath) {
for (String key : keys.split(",")) {
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = cluster.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = cluster.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", cluster.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 1;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = cluster.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = cluster.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = cluster.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
String info = "unknowKeyType:" + keyType + "key:" + key;
System.out.println(info);
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath + ".error", true));
bw.write(info);
bw.write('\r');
bw.write('\n');
return true;
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
return false;
}
synchronized (this) {//may be called from multiple threads, so file writes must be serialized
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
return true;
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
return false;
}
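/*
 * Usage sketch (the "export-keys" branch in main()):
 *   java -jar redis-cluster-util-jar-with-dependencies.jar export-keys key1,key2 D:/show-key-export.dat
 * Each output line is one JSON object {"key":...,"type":...,"value":...}; unknown key types go to <filePath>.error.
 */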
/**
* Export the given comma-separated keys from a single node (host:port), one JSON object per line.
*/
public void exportHostKeys(String host, String port, String keys, String filePath) {
String[] keysInfo = keys.split(",");
Jedis nodeCli = new Jedis(host, Integer.valueOf(port));
long beginTime = System.currentTimeMillis();
for (String key : keysInfo) {
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = nodeCli.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = nodeCli.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", nodeCli.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 1;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = nodeCli.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
//System.out.println("data:" + data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = nodeCli.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath, true));//append, so each key's JSON line is kept instead of overwriting the file per key
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
nodeCli.close();
String useTime = " useTime->" + ((System.currentTimeMillis() - beginTime) / 1000) + "s";
System.out.println(useTime);
}
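/*
 * Usage sketch (the "exportHostKeys" branch in main(), which talks to one node directly):
 *   java -jar redis-cluster-util-jar-with-dependencies.jar exportHostKeys ip port key1,key2 D:/export.dat
 */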
/**
* Scan one node (REDIS_HOST:REDIS_PORT) and export keys matching the given prefixes, one JSON object per line.
*/
public void exportKeyOneHost(String keyPre, String filePath) {
String[] exportKeyPre = keyPre.split(",");
Jedis nodeCli = new Jedis(REDIS_HOST, REDIS_PORT);
long scanTotalcount = 0, exportTotalCount = 0;
long beginTime = System.currentTimeMillis();
String info = nodeCli.info("Keyspace");
long dbKeySize = 0;
if (info.indexOf("db0:keys=") > 0) {
String value = info.substring(info.indexOf("db0:keys=") + "db0:keys=".length()).split(",")[0];
dbKeySize = Long.valueOf(value);
}
String cursor = "0";
long thisScanSize = 0, thisExportSize = 0;
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
thisScanSize++;
scanTotalcount++;
if (thisScanSize % 1000 == 0) {
System.out.println("thisScanSize:" + thisScanSize + "/" + dbKeySize + " thisExportSize:"
+ thisExportSize + " totalUseTime:" + (System.currentTimeMillis() - beginTime) / 1000
+ "s)");
}
boolean isExport = false;
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
isExport = true;
break;
}
}
if (!isExport) {
continue;
}
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = nodeCli.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = nodeCli.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", nodeCli.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 1;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = nodeCli.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
//System.out.println("data:" + data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = nodeCli.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
// System.out.println("data json:" + json);
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
thisExportSize++;
exportTotalCount++;
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
} while ((!"0".equals(cursor)));
nodeCli.close();
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
String useTime = " useTime->" + ((System.currentTimeMillis() - beginTime) / 1000) + "s";
System.out.println(dfs.format(new Date()) + "exportKey:" + keyPre + "]" + useTime);
System.out.println("scanTotalcount->" + scanTotalcount + " exportTotalCount->" + exportTotalCount);
}
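/*
 * Usage sketch (the "exporth" branch in main(); scans the single node at REDIS_HOST:REDIS_PORT):
 *   java -jar redis-cluster-util-jar-with-dependencies.jar exporth keyPattern D:/export.dat
 * Pass "*" as the pattern to export every key on that node.
 */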
/**
* Query keys matching the given pattern on every master and print key -> value.
*/
public void queryKeyLike(String pattern) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
int count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master")) {
Jedis nodeCli = new Jedis(host, port);//connect to this node
Set<String> keys = nodeCli.keys(pattern);//
Iterator<String> t1 = keys.iterator();
while (t1.hasNext()) {
String key = t1.next();
System.out.println(key + "->" + nodeCli.get(key));
count++;
}
nodeCli.close();
}
}
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
System.out.println(dfs.format(new Date()) + pattern + "] query count->" + count + " useTime->"
+ ((System.currentTimeMillis() - beginTime)) + "ms ");
}
/**
* Count keys matching the given pattern across all masters.
*/
public void countKeyLike(String pattern) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
int count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master")) {
Jedis nodeCli = new Jedis(host, port);//connect to this node
Set<String> keys = nodeCli.keys(pattern);//
count += keys.size();
nodeCli.close();
}
}
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
System.out.println(dfs.format(new Date()) + pattern + "] count->" + count + " useTime->"
+ ((System.currentTimeMillis() - beginTime)) + "ms ");
}
/**
* Sample INFO stats across all masters and append one CSV row to monitor.csv.
*/
public void monitor(String[] args) {
double connected_clients = 0, total_commands_processed = 0, instantaneous_ops_per_sec = 0, total_net_input_bytes = 0, total_net_output_bytes = 0, instantaneous_input_kbps = 0, instantaneous_output_kbps = 0, used_memory = 0;
long keyTotalCount = 0;
DecimalFormat formatDouble = new DecimalFormat("##0.00");//number format
DecimalFormat formatLong = new DecimalFormat("##0");//number format
Map<String, String> opsMap = new TreeMap<String, String>();
Map<String, String> ramMap = new TreeMap<String, String>();
Map<String, String> inputMap = new TreeMap<String, String>();
Map<String, String> outputMap = new TreeMap<String, String>();
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
JedisPool pool = entry.getValue();
String info = null;
Jedis jedis;
try {
jedis = pool.getResource();
info = jedis.info();
pool.returnResourceObject(jedis);
} catch (JedisConnectionException e) {
String msg = e.getMessage();
if (msg.contains("Connection refused")) {
System.out.println(entry.getKey() + " Connection refused");
continue;
}
} catch (Exception e) {
e.printStackTrace();
}
if (info.contains("role:slave")) {//只统计master
continue;
}
connected_clients += getValue(info, "connected_clients");
total_commands_processed += getValue(info, "total_commands_processed");
instantaneous_ops_per_sec += getValue(info, "instantaneous_ops_per_sec");
opsMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_ops_per_sec")));
total_net_input_bytes += getValue(info, "total_net_input_bytes");
total_net_output_bytes += getValue(info, "total_net_output_bytes");
instantaneous_input_kbps += getValue(info, "instantaneous_input_kbps");
inputMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_input_kbps") / 1024) + "KB");
instantaneous_output_kbps += getValue(info, "instantaneous_output_kbps");
outputMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_output_kbps") / 1024)
+ "KB");
used_memory += getValue(info, "used_memory");
ramMap.put(entry.getKey(), formatDouble.format(getValue(info, "used_memory") / 1024 / 1024) + "MB");
if (info.indexOf("db0:keys=") > 0) {
String value = info.substring(info.indexOf("db0:keys=") + "db0:keys=".length()).split(",")[0];
keyTotalCount += Long.valueOf(value);
}
}
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
StringBuffer sb = new StringBuffer();
sb.append(sdf.format(new Date()));
sb.append(",");
sb.append(formatLong.format(connected_clients));
sb.append(",");
sb.append(formatLong.format(total_commands_processed));
sb.append(",");
sb.append(formatLong.format(instantaneous_ops_per_sec));
sb.append(",");
sb.append(formatDouble.format(total_net_input_bytes / 1024 / 1024));
sb.append(",");
sb.append(formatDouble.format(total_net_output_bytes / 1024 / 1024));
sb.append(",");
sb.append(formatDouble.format(instantaneous_input_kbps));
sb.append(",");
sb.append(formatDouble.format(instantaneous_output_kbps));
sb.append(",");
sb.append(formatDouble.format(used_memory / 1024 / 1024));
sb.append(",");
sb.append(keyTotalCount);
System.out.println(sb.toString());
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(SystemConf.confFileDir + "/monitor.csv", true));
bw.write(sb.toString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
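/*
 * Columns of the CSV row appended to monitor.csv, in order:
 * timestamp, connected_clients, total_commands_processed, instantaneous_ops_per_sec,
 * total_net_input_bytes(MB), total_net_output_bytes(MB), instantaneous_input_kbps,
 * instantaneous_output_kbps, used_memory(MB), keyTotalCount (all values summed over masters).
 */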
/**
* Print INFO statistics aggregated over all masters; extra args (ops|input|output|ram) print per-node breakdowns.
*/
public void info(String[] args) {
double connected_clients = 0, total_commands_processed = 0, instantaneous_ops_per_sec = 0, total_net_input_bytes = 0, total_net_output_bytes = 0, instantaneous_input_kbps = 0, instantaneous_output_kbps = 0, used_memory = 0;
long keyTotalCount = 0;
DecimalFormat formatDouble = new DecimalFormat("#,##0.00");//number format
DecimalFormat formatLong = new DecimalFormat("#,##0");//number format
Map<String, String> opsMap = new TreeMap<String, String>();
Map<String, String> ramMap = new TreeMap<String, String>();
Map<String, String> inputMap = new TreeMap<String, String>();
Map<String, String> outputMap = new TreeMap<String, String>();
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
String info = null;
try {
info = entry.getValue().getResource().info();
} catch (JedisConnectionException e) {
String msg = e.getMessage();
if (msg.contains("Connection refused")) {
System.out.println(entry.getKey() + " Connection refused");
continue;
}
}
if (null == info || info.contains("role:slave")) {//only aggregate masters
continue;
}
connected_clients += getValue(info, "connected_clients");
total_commands_processed += getValue(info, "total_commands_processed");
instantaneous_ops_per_sec += getValue(info, "instantaneous_ops_per_sec");
opsMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_ops_per_sec")));
total_net_input_bytes += getValue(info, "total_net_input_bytes");
total_net_output_bytes += getValue(info, "total_net_output_bytes");
instantaneous_input_kbps += getValue(info, "instantaneous_input_kbps");
inputMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_input_kbps") / 1024) + "KB");
instantaneous_output_kbps += getValue(info, "instantaneous_output_kbps");
outputMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_output_kbps") / 1024)
+ "KB");
used_memory += getValue(info, "used_memory");
ramMap.put(entry.getKey(), formatDouble.format(getValue(info, "used_memory") / 1024 / 1024) + "MB");
if (info.indexOf("db0:keys=") > 0) {
String value = info.substring(info.indexOf("db0:keys=") + "db0:keys=".length()).split(",")[0];
keyTotalCount += Long.valueOf(value);
}
}
if (args.length >= 2) {
Iterator<Entry<String, String>> it;
for (int i = 0; i < args.length; i++) {
if ("ops".equals(args[i])) {
it = opsMap.entrySet().iterator();
System.out.println("instantaneous_ops_per_sec");
while (it.hasNext()) {
Entry<String, String> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("instantaneous_ops_per_sec:" + formatLong.format(instantaneous_ops_per_sec));
} else if ("input".equals(args[i])) {
it = inputMap.entrySet().iterator();
System.out.println("instantaneous_input_kbps");
while (it.hasNext()) {
Entry<String, String> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("total_net_input_bytes:"
+ formatDouble.format(total_net_input_bytes / 1024 / 1024) + "MB");
} else if ("output".equals(args[i])) {
it = outputMap.entrySet().iterator();
System.out.println("instantaneous_output_kbps");
while (it.hasNext()) {
Entry<String, String> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("total_net_output_bytes:"
+ formatDouble.format(total_net_output_bytes / 1024 / 1024) + "MB");
} else if ("ram".equals(args[i])) {
it = ramMap.entrySet().iterator();
System.out.println("used_memory");
while (it.hasNext()) {
Entry<String, String> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("used_memory:" + formatDouble.format(used_memory / 1024 / 1024) + "MB");
}
}
} else {
System.out.println("connected_clients:" + formatLong.format(connected_clients));
System.out.println("total_commands_processed:" + formatLong.format(total_commands_processed));
System.out.println("instantaneous_ops_per_sec:" + formatLong.format(instantaneous_ops_per_sec));
System.out.println("total_net_input_bytes:" + formatDouble.format(total_net_input_bytes / 1024 / 1024)
+ "MB");
System.out.println("total_net_output_bytes:" + formatDouble.format(total_net_output_bytes / 1024 / 1024)
+ "MB");
System.out.println("instantaneous_input_kbps:" + formatDouble.format(instantaneous_input_kbps));
System.out.println("instantaneous_output_kbps:" + formatDouble.format(instantaneous_output_kbps));
System.out.println("used_memory:" + formatDouble.format(used_memory / 1024 / 1024) + "MB");
System.out.println("keyTotalCount:" + keyTotalCount);
}
}
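/*
 * Usage sketch (the "info" branch in main()):
 *   java -jar redis-cluster-util-jar-with-dependencies.jar info          -> cluster-wide totals
 *   java -jar redis-cluster-util-jar-with-dependencies.jar info ops ram  -> per-master breakdown of the named metrics
 * Recognized breakdown names are ops, input, output and ram.
 */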
private double getValue(String info, String key) {
String value;
value = info.substring(info.indexOf(key) + key.length() + 1).split("\r\n")[0];
return Double.valueOf(value);
}
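/*
 * getValue() assumes the "name:value\r\n" layout of Redis INFO output, e.g. "connected_clients:10\r\n" -> 10.0,
 * and uses the first occurrence of the metric name, so callers must pass exact INFO field names.
 */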
/**
* Print keys matching the given pattern on every master.
*/
public void keys(String pattern) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
int count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master")) {
Jedis nodeCli = new Jedis(host, port);//connect to this node
Set<String> keys = nodeCli.keys(pattern);// TODO: switch to SCAN to avoid blocking the node
for (String key : keys) {
System.out.println(key);
count++;
}
nodeCli.close();
}
}
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
System.out.println(dfs.format(new Date()) + pattern + "*] count->" + count + " useTime->"
+ ((System.currentTimeMillis() - beginTime)) + "ms ");
}
/**
* Sum the number of keys (db0) across all reachable masters.
*/
public long keySize() {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
long count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master") && !type.contains("fail")) {
try {
Jedis nodeCli = new Jedis(host, port);//connect to this node
String info = nodeCli.info("Keyspace");
if (info.indexOf("db0:keys=") > 0) {
String value = info.substring(info.indexOf("db0:keys=") + "db0:keys=".length()).split(",")[0];
count += Long.valueOf(value);
}
nodeCli.close();
} catch (Exception e) {
}
}
}
System.out.println("clusterKeySize:" + count + " useTime->" + ((System.currentTimeMillis() - beginTime))
+ "ms ");
return count;
}
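/*
 * keySize() sums the db0 key count of every reachable, non-failed master by parsing the
 * "db0:keys=NNN,..." line of INFO Keyspace; nodes that throw are silently skipped.
 */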
/**
* Delete keys matching the given prefixes on every master (one scan thread per master); deleted key names are appended to filePath.
*
*/
public void dels(String keyPre, final String filePath) {
final String[] exportKeyPre = keyPre.split(",");
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//skip nodes that cannot be connected
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只能从master删除
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
nodeCli.del(key);
writeFile(key, "del", filePath);
break;
}
}
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "del thread");
exportTheadList.add(exportThread);
exportThread.start();
}
waitThread(exportTheadList);
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = scanCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("del total:" + totalCount + " speed:" + speedFormat.format(speed) + " useTime:"
+ (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
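/*
 * Usage sketch (the "dels" branch in main()):
 *   java -jar redis-cluster-util-jar-with-dependencies.jar dels keyPattern D:/delKey.dat
 * One scan thread per master; every deleted key name is appended to the given file.
 */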
/**
* Count keys matching the given pattern across all masters (same logic as countKeyLike).
*/
public void printKeyLike(String pattern) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
int count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master")) {
Jedis nodeCli = new Jedis(host, port);//connect to this node
Set<String> keys = nodeCli.keys(pattern);//
count += keys.size();
nodeCli.close();
}
}
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
System.out.println(dfs.format(new Date()) + pattern + "] count->" + count + " useTime->"
+ ((System.currentTimeMillis() - beginTime)) + "ms ");
}
/**
* java -jar redis-cluster-util-jar-with-dependencies.jar h
*/
@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
// args = new String[] { "add-master", "172.20.16.87:29000", "172.20.16.88:29000", "172.20.16.89:29000" };
// args = new String[] { "add-master", "172.20.16.87:29000", "172.20.16.88:29000" };
//args = new String[] { "add-master", "172.20.16.87:29005" };
// args = new String[] { "analyze", "isKeyStat=true", "isCmdDetail=true", "showTop=20", "host=172.20.16.48",
// "port=5001", "monitorTime=5" };
//args = new String[] { "add-slave","172.20.16.87:29000->172.20.16.88:29000;172.20.16.87:29001->172.20.16.88:29001" };
//args = new String[] { "add-slave","172.20.16.87:29001->172.20.16.88:29001" };
//args = new String[] { "add-node", "172.20.16.91:29010", "172.20.16.89:29010" };
//args = new String[] { "bakup-node", "D://abc" };
// args = new String[] { "benchmark", "E:/bakup/jumei-app/show-dev-data-export.dat", "10" };
// args = new String[] { "check" };
// args = new String[] { "count" };
//args = new String[] { "create",
// "172.20.16.87:29000->172.20.16.88:29000;172.20.16.87:29001->172.20.16.88:29001;172.20.16.87:29002->172.20.16.88:29002" };
// args = new String[] { "del" };
// args = new String[] { "dels" };
// args = new String[] { "del-node", ":0" };
// args = new String[] { "del-node", "172.20.16.87:29000" };
// args = new String[] { "del-node", "172.20.16.88:29000;172.20.16.89:29000" };
//args = new String[] { "export", "*", "d:/show-dev-data-export.dat" };
//args = new String[] { "export-keys", "s_f_p_9186_86964530,s_f_p_7580_68233821", "d:/show-key-export.dat" };
// args = new String[] { "export-keys-file", "d:/keys.txt", "d:/show-key-export.dat" };
//args = new String[] { "fix-slot", "172.20.16.88:29000" };
// args = new String[] { "failover", "192.168.254.130:5001" };
// args = new String[] { "fix-slot-cover", "192.168.254.129:5001" };
// args = new String[] { "fix-slot-stable", "192.168.254.129:5001" };
// args = new String[] { "flush" };
// args = new String[] { "get" };
// args = new String[] { "import", "l,s", "d:/show-dev-data-export.dat" };
//args = new String[] { "import", "*", "E:/bakup/jumei-app/show-online-2016.2.3.dat" };
// args = new String[] { "import-mongodb", "*", "D:/bakup/jumeiapp-redis/show-imported-list.2016.1.11.dat" };
// args = new String[] { "info" };
// args = new String[] { "info", "output", "ops" };
// args = new String[] { "keys"};
// args = new String[] { "keysize"};
//args = new String[] { "monitor", "2" };
// args = new String[] { "raminfo", "*" };
//args = new String[] { "raminfo", "172.20.16.89:5001" };
//args = new String[] { "rubbish-del" };
//args = new String[] { "key-size-count", "u_id_set", "D:/" };
//args = new String[] { "reshard", "172.20.16.87:29000", "0-1024;1025-2048;4096-4096;4098-4301" };
//"reshard" "192.168.254.129:5000" "0-1024;1025-2048;4096-4096;4098-4301"
// args = new String[] { "set", "testkey", "testvalue" };
// args = new String[] { "h" };
//args = new String[] { "followRestore", "*", "D:/29000-u_f.dat" };
//args = "safe-delete safe-delete 1".split(" ");
Runtime.getRuntime().addShutdownHook(new CleanWorkThread());
RedisClusterManager rcm = new RedisClusterManager();
long beginTime = System.currentTimeMillis();
if (args.length == 0) {
printHelp();
return;
}
String cmd = args[0];
if ("raminfo".equals(cmd) || "exporth".equals(cmd) || "exportHostKeys".equals(cmd)) {
} else {
connectCluser();
}
if (args.length > 0) {
if ("add-slave".equals(cmd)) {
if (args.length == 2) {
String[] master2slave = trim(args[1]).split(";");
for (int i = 0; i < master2slave.length; i++) {
String[] hostsInfo = master2slave[i].split("->");
if (hostsInfo.length == 2) {
rcm.addSlave(hostsInfo[0], hostsInfo[1], false);
} else {
System.out.println("请输入要添加的节点及主节点列表");
}
}
Thread.sleep(3000);//wait for the cluster config to propagate
rcm.check();
} else {
System.out.println("请输入主备关系:host1:port1->host2:port1;host1:port2->host2:port2;");
}
} else if ("bakup-node".equals(cmd)) {
if (args.length == 2) {
rcm.bakupNode(args[1]);
} else {
System.out.println("参数错误!");
}
} else if ("analyze".equals(cmd)) {
MonitorUtil.main(args);
} else if ("fansCount".equals(cmd)) {
if (args.length == 2) {
rcm.fansCount(args[1]);
} else {
System.out.println("fansCount D:/export.dat");
}
} else if ("exportHostKeys".equals(cmd)) {
if (args.length == 5) {
rcm.exportHostKeys(args[1], args[2], args[3], args[4]);
} else {
System.out.println("exportHostKeys ip port key1,key2 D:/export.dat");
}
} else if ("followDel".equals(cmd)) {
if (args.length == 3) {
rcm.followDel(args[1], args[2]);
} else {
System.out.println("followDel D:/u_f_uid_delete.dat");
}
} else if ("followAttentionDel".equals(cmd)) {
if (args.length == 3) {
rcm.followAttentionDel(args[1], args[2]);
} else {
System.out.println("followAttentionDel D:/u_a_uid_delete.dat");
}
} else if ("followRestore".equals(cmd)) {
if (args.length == 3) {
rcm.followRestore(args[1], args[2]);
} else {
System.out.println("followRestore D:/29000-u_f.dat");
}
} else if ("praiseDel".equals(cmd)) {
if (args.length == 3) {
rcm.praiseDel(args[1], args[2]);
} else {
System.out.println("praiseDel D:/input.dat");
}
} else if ("praiseCountDel".equals(cmd)) {
if (args.length == 3) {
rcm.praiseCountDel(args[1], args[2]);
} else {
System.out.println("praiseCountDel D:/input.dat");
}
} else if ("praiseCount".equals(cmd)) {
if (args.length == 3) {
rcm.praiseCount(args[1], args[2]);
} else {
System.out.println("praiseCount D:/export.dat");
}
} else if ("uaCheck".equals(cmd)) {
if (args.length == 2) {
rcm.uaCheck(args[1]);
} else {
System.out.println("fansCheck D:/export.dat");
}
} else if ("ufCheck".equals(cmd)) {
if (args.length == 2) {
rcm.ufCheck(args[1]);
} else {
System.out.println("fansCheck D:/export.dat");
}
} else if ("raminfo".equals(cmd)) {
if (args.length == 2) {
rcm.raminfo(args[1]);
} else {
connectCluser();
rcm.raminfo(null);
}
} else if ("safe-delete".equals(cmd)) {
if (args.length == 2) {
rcm.safeDelete(args[1], Long.MAX_VALUE);
} else if (args.length == 3) {
rcm.safeDelete(args[1], Long.parseLong(args[2]));
} else {
System.out.println("参数错误");
}
} else if ("create".equals(cmd)) {
StringBuffer sb = new StringBuffer();
for (int i = 1; i < args.length; i++) {
sb.append(args[i]);
}
String hostTrim = trim(sb.toString());
String[] master2slave = hostTrim.split(";");
rcm.create(rcm, master2slave);
Thread.sleep(3000);//wait for the cluster config to propagate
rcm.check();
} else if ("reshard".equals(cmd)) {
rcm.reshard(args);
} else if ("failover".equals(cmd)) {
String[] slaves = trim(args[1]).split(";");
for (String slave : slaves) {
rcm.failOver(slave);
}
Thread.sleep(3000);//wait for the cluster config to propagate
rcm.check();
} else if ("fix-slot-cover".equals(cmd)) {
rcm.fixSlotCover(args[1]);
Thread.sleep(3000);//wait for the cluster config to propagate
rcm.check();
} else if ("fix-slot-stable".equals(cmd)) {
rcm.fixSlotStable();
Thread.sleep(3000);//wait for the cluster config to propagate
rcm.check();
} else if ("add-master".equals(cmd)) {
if (args.length >= 2) {
rcm.addMaster(args);
Thread.sleep(3000);//wait for the cluster config to propagate
rcm.check();
} else {
System.out.println("请输入要添加的 主节点");
}
} else if ("dels".equals(cmd)) {
if (args.length == 3) {
rcm.dels(args[1], args[2]);
} else {
System.out.println("dels keyPattern D:/delKey.dat");
}
} else if ("counts".equals(cmd)) {
if (args.length == 1) {
System.out.println("请输入要统计的key前缀");
} else {
for (int i = 1; i < args.length; i++) {
rcm.countKeyLike(args[i]);
}
}
} else if ("del-node".equals(cmd)) {
if (args.length == 2) {
String[] hostsInfo = trim(args[1]).split(";");
for (int i = 0; i < hostsInfo.length; i++) {
rcm.delNode(hostsInfo[i]);
}
Thread.sleep(3000);//wait for the cluster config to propagate
rcm.check();
} else {
System.out.println("请输入要删除的节点:host1:port1;host2:port2;");
}
} else if ("querys".equals(cmd)) {
if (args.length == 1) {
rcm.queryKeyLike("");
} else {
for (int i = 1; i < args.length; i++) {
rcm.queryKeyLike(args[i]);
}
}
} else if ("export".equals(cmd)) {
if (args.length == 3) {
rcm.exportKeyPre(args[1], args[2]);
} else {
System.out.println("export keyPattern D:/export.dat");
}
} else if ("exporth".equals(cmd)) {
if (args.length == 3) {
rcm.exportKeyOneHost(args[1], args[2]);
} else {
System.out.println("export keyPattern D:/export.dat");
}
} else if ("export-keys".equals(cmd)) {
if (args.length == 3) {
rcm.exportKeys(args[1], args[2]);
} else {
System.out.println("export keys D:/export.dat");
}
} else if ("export-keys-file".equals(cmd)) {
if (args.length == 3) {
rcm.exportKeysFile(args[1], args[2]);
} else {
System.out.println("export keys D:/export.dat");
}
} else if ("import".equals(cmd)) {
if (args.length == 3) {
rcm.importKey(args[1], args[2]);
} else {
System.out.println("import keyPattern D:/import.dat");
}
} else if ("import-mongodb".equals(cmd)) {
if (args.length == 3) {
rcm.importMongodb(args[1], args[2]);
} else {
System.out.println("import keyPattern D:/import.dat");
}
} else if ("restoreUserHash".equals(cmd)) {
if (args.length == 2) {
rcm.restoreUserHash(args[1]);
} else {
System.out.println("restoreUserHash uid1,uid2");
}
} else if ("restoreShowPraise".equals(cmd)) {
if (args.length == 2) {
rcm.restoreShowPraise(args[1]);
} else {
System.out.println("restoreUserHash uid1,uid2");
}
} else if ("followRestoreByUids".equals(cmd)) {
if (args.length == 2) {
rcm.followRestoreByUids(args[1]);
} else {
System.out.println("followRestoreByUids uid1,uid2");
}
} else if ("set".equals(cmd) || "del".equals(cmd)) {
rcm.opt(args);
} else if ("get".equals(cmd)) {
rcm.opt(args);
} else if ("keys".equals(cmd)) {
if (args.length == 1) {
System.out.println("请输入要查詢的key前缀");
} else {
rcm.keys(args[1]);
}
} else if ("keysize".equals(cmd)) {
rcm.keySize();
} else if ("key-size-count".equals(cmd)) {
if (args.length == 3) {
rcm.keySizeCount(args[1], args[2]);
} else {
System.out.println("key-size-count u_id_set D:/");
}
} else if ("info".equals(cmd)) {
rcm.info(args);
} else if ("monitor".equals(cmd)) {
long sleepTime = 1000;
if (args.length == 2) {
sleepTime = Long.valueOf(args[1]) * 1000;
}
while (true) {
try {
rcm.monitor(args);
} catch (Throwable e) {
e.printStackTrace();
}
Thread.sleep(sleepTime);
}
} else if ("check".equals(cmd)) {
rcm.check();
} else if ("flush".equals(cmd)) {
rcm.flushall();
} else if ("h".equals(cmd) || "-h".equals(cmd) || "help".equals(cmd)) {
printHelp();
} else {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
Map<Integer, String> slot2Host = new HashMap<Integer, String>();
List<Object> slotInfos = jedis.clusterSlots();
for (Object slotInfo : slotInfos) {
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
@SuppressWarnings("rawtypes")
List hostInfoList = (ArrayList) slotInfoList.get(2);
String host = new String((byte[]) hostInfoList.get(0));
int port = Integer.valueOf(hostInfoList.get(1).toString());
String hostInfo = host + ":" + port;
for (int i = (int) begin; i <= end; i++) {
slot2Host.put(i, hostInfo);
}
}
jedis.close();
String key = args[1];
int slot = JedisClusterCRC16.getCRC16(key) % 16384;
String[] hostInfo = null;
String hostPort = slot2Host.get(slot);
if (null != hostPort) {
hostInfo = hostPort.split(":");
cmd = "redis-cli -h " + hostInfo[0] + " -p " + hostInfo[1];
for (int i = 0; i < args.length; i++) {
cmd = cmd + " " + args[i];
}
executeCmd(cmd);
} else {
System.out.println("not cover solt:" + slot);
}
}
for (String arg : args) {
System.out.print(arg + " ");
}
System.out.println("finish use time " + ((System.currentTimeMillis() - beginTime)) + "ms");
}
}
public Map<String, AtomicLong> ramSizeCount = new ConcurrentHashMap<String, AtomicLong>();
public Map<String, AtomicLong> ramKeyCount = new ConcurrentHashMap<String, AtomicLong>();
public StringBuffer ramUnknowKey = new StringBuffer();
private void writeRamInfo() {
BufferedWriter raminfoUnknow = null;
try {
Iterator<Entry<String, AtomicLong>> it = ramKeyCount.entrySet().iterator();
System.out.println("key type size:" + ramKeyCount.size());
bw = new BufferedWriter(new FileWriter(SystemConf.confFileDir + "/raminfo.csv"));
bw = new BufferedWriter(new FileWriter(SystemConf.confFileDir + "/raminfo.csv"));
while (it.hasNext()) {
Entry<String, AtomicLong> entry = it.next();
String info = entry.getKey() + "," + entry.getValue() + "," + ramSizeCount.get(entry.getKey()) + "\r\n";
bw.write(info);
}
raminfoUnknow = new BufferedWriter(new FileWriter(SystemConf.confFileDir + "/raminfoUnknowKey.txt", true));
ramUnknowKey.append("\r\n");
raminfoUnknow.write(ramUnknowKey.toString());
ramUnknowKey = new StringBuffer();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
try {
if (null != raminfoUnknow) {
raminfoUnknow.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
/**
* Group keys by business prefix and aggregate per-prefix key counts and serialized sizes (a rough memory-usage breakdown).
*/
public void raminfo(String node) {
List<Thread> exportTheadList = new ArrayList<Thread>();
if (null != node) {
String[] hostInfo = node.split(":");
Jedis jedis = new Jedis(hostInfo[0], Integer.valueOf(hostInfo[1]));
nodeAnalyze(exportTheadList, node, jedis);
} else {
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
if (null != node) {
if (!node.equals(entry.getKey())) {
continue;
}
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (null == node && info.contains("role:slave")) {//如果没有指定节点,统计所有master
continue;
}
nodeAnalyze(exportTheadList, entry.getKey(), nodeCli);
}
}
waitThread(exportTheadList);
writeRamInfo();
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = scanCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan total:" + totalCount + " speed:" + speedFormat.format(speed) + " useTime:"
+ (useTime / 1000.0) + "s");
}
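/*
 * Usage sketch (the "raminfo" branch in main()):
 *   java -jar redis-cluster-util-jar-with-dependencies.jar raminfo            -> all masters
 *   java -jar redis-cluster-util-jar-with-dependencies.jar raminfo host:port  -> a single node
 * Results are written to <confFileDir>/raminfo.csv as "prefix,keyCount,serializedBytes" rows;
 * keys without a recognizable prefix are collected in raminfoUnknowKey.txt.
 */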
private void nodeAnalyze(List<Thread> exportTheadList, String node, final Jedis nodeCli) {
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
int len = "serializedlength:".length();
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
String debug = nodeCli.debug(DebugParams.OBJECT(key));
int startIndex = debug.indexOf("serializedlength:");
int endIndex = debug.indexOf(" ", startIndex);
debug = debug.substring(startIndex + len, endIndex);
int i = 0;
//key = "s_c_p23926";//testkey
//key = "26228273praiseto101909365showid10290";//testkey
if (key.startsWith("rpcUserInfo")) {
key = "rpcUserInfo";
} else if (key.startsWith("s_url")) {
key = "s_url";
} else if (key.startsWith("live_link_")) {
key = "live_link_";
} else if (key.startsWith("historyappmessages")) {
key = "historyappmessages";
} else if (key.startsWith("historyadminmessages")) {
key = "historyadminmessages";
} else if (key.contains("praiseto") && key.contains("showid")) {
key = "praisetoshowid";
} else if (key.contains("followuser")) {
key = "followuser";
} else if (key.startsWith("user_relations")) {
key = "user_relations";
} else if (key.startsWith("user_relation_")) {
key = "user_relation_";
} else {
char c;
boolean isFindDecollator = false, isKnowBusiness = false;
for (; i < key.length(); i++) {
c = key.charAt(i);
if (key.charAt(i) == '_') {
isFindDecollator = true;
}
if (c == ':') {
isFindDecollator = true;
key = key.substring(0, i);
break;
} else if (isFindDecollator && i > 0 && c >= '0' && c <= '9') {
key = key.substring(0, i);
isKnowBusiness = true;
break;
}
}
if (!isKnowBusiness && !isFindDecollator) {//没有加业务前缀
ramUnknowKey.append(key).append(',');
key = "unknown";
}
}
AtomicLong sizeCount = ramSizeCount.get(key);
if (null == sizeCount) {
sizeCount = new AtomicLong();
ramSizeCount.put(key, sizeCount);
}
sizeCount.addAndGet(Long.valueOf(debug));
AtomicLong keyCount = ramKeyCount.get(key);
if (null == keyCount) {
keyCount = new AtomicLong();
ramKeyCount.put(key, keyCount);
}
keyCount.incrementAndGet();
scanCount.incrementAndGet();
if (scanCount.get() % 100000 == 0) {
System.out.print("scan key size:" + scanCount.get());
writeRamInfo();
}
}
} while ((!"0".equals(cursor)));
}
}, node + "-raminfo");
exportTheadList.add(exportThread);
exportThread.start();
}
/**
* Safely delete keys matching the given prefixes: each key is exported to a restore file before it is deleted.
*/
public void safeDelete(String keyPre, final long toDelCount) {
final String[] delKeyPres = keyPre.split(",");
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
final String filePath = SystemConf.confFileDir + "safeDeleted-" + keyPre + "-" + formatter.format(new Date()) + ".txt";
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> delThreadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {
continue;
}
Thread delThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
scanCount.incrementAndGet();
for (String keyPreDel : delKeyPres) {
if (key.startsWith(keyPreDel)) {
if (delCount.incrementAndGet() <= toDelCount) {
if (exportKeys(key, filePath)) {
nodeCli.del(key);//delete only after the key was exported successfully
}
} else {
delCount.decrementAndGet();
break;
}
}
}
if (scanCount.get() % 10000 == 0) {
System.out.println("scanKeySize:" + scanCount.get() + " delKeySize:" + delCount.get());
}
}
} while ((!"0".equals(cursor)) && delCount.get() < toDelCount);
}
}, entry.getKey() + "delThread");
delThreadList.add(delThread);
delThread.start();
}
waitThread(delThreadList);
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = scanCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scanCount:" + totalCount + " delCount:" + delCount.get() + " speed:"
+ speedFormat.format(speed) + " useTime:" + (useTime / 1000.0) + "s");
System.out.println("restoreCmd: java -jar redis-cluster-manager-jar-with-dependencies.jar import " + keyPre + " " + filePath);
}
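/*
 * Usage sketch (the "safe-delete" branch in main()):
 *   java -jar redis-cluster-util-jar-with-dependencies.jar safe-delete keyPre [maxDeleteCount]
 * Every matching key is exported via exportKeys() before it is deleted, and the restore command
 * printed at the end replays that file with the "import" command.
 */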
static void waitThread(List<Thread> delThreadList) {
for (Thread thread : delThreadList) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
}
private void bakupNode(String filePath) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath));
for (String node : nodes.split("\n")) {
bw.write(node);
bw.write("\\n");
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
private static String trim(String sb) {
String hostTrim = sb.toString().replace(" ", "");
hostTrim = hostTrim.replace("\r", "");
hostTrim = hostTrim.replace("\n", "");
hostTrim = hostTrim.replace("\\", "");
return hostTrim;
}
//"reshard", "172.20.162.87:29000", "0-1024;1025-2048"
@SuppressWarnings({"rawtypes", "unchecked"})
private void reshard(String[] args) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
Jedis destinationNode = connect(args[1]);
String[] destinationHostInfo = args[1].split(":");
String destinationHost = destinationHostInfo[0];
int destinationPort = Integer.valueOf(destinationHostInfo[1]);
String nodes = jedis.clusterNodes();
Map<String, String> host2NodeId = new HashMap<String, String>();
String destination_node_id = null;
List<Jedis> clusterHostList = new ArrayList<Jedis>();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String nodeId = nodeInfo[0];
String host = nodeInfo[1];
String type = nodeInfo[2];
String[] hostInfo = nodeInfo[1].split(":");
clusterHostList.add(new Jedis(hostInfo[0], Integer.parseInt(hostInfo[1])));
if (args[1].equals(host)) {
destination_node_id = nodeId;
if (type.contains("master")) {
destination_node_id = nodeId;
} else {
System.out.println(args[1] + " is not master !");
jedis.close();
return;
}
}
if (type.contains("master")) {
host2NodeId.put(host, nodeId);
}
}
if (null == destination_node_id) {
System.out.println(args[1] + " destination_node_id not found");
jedis.close();
return;
}
byte[] coverSlot = new byte[16384];
Map<Integer, Jedis> slot2Host = new HashMap<Integer, Jedis>();
Map<Integer, String> slot2NodeId = new HashMap<Integer, String>();
Map<String, Jedis> host2Jedis = new HashMap<String, Jedis>();
List<Object> slotInfos = jedis.clusterSlots();
for (Object slotInfo : slotInfos) {
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
List hostInfoList = (ArrayList) slotInfoList.get(2);
String host = new String((byte[]) hostInfoList.get(0));
int port = Integer.valueOf(hostInfoList.get(1).toString());
String hostInfo = host + ":" + port;
for (int i = (int) begin; i <= end; i++) {
coverSlot[i] = 1;
Jedis jedisHost = host2Jedis.get(hostInfo);
if (null == jedisHost) {
jedisHost = new Jedis(host, port);
host2Jedis.put(hostInfo, jedisHost);
}
slot2Host.put(i, jedisHost);
slot2NodeId.put(i, host2NodeId.get(hostInfo));
}
}
String[] slots2Migrating = args[2].split(";");
int slotBegin = 0, slotEnd = 0;
int timeout = 15000, migratCount = 10;
for (String slotRange : slots2Migrating) {
String[] slotInfo = slotRange.split("-");
slotBegin = Integer.valueOf(slotInfo[0]);
if (slotInfo.length == 1) {
slotEnd = slotBegin;
} else if (slotInfo.length == 2) {
slotEnd = Integer.valueOf(slotInfo[1]);
} else {
System.out.println("参数错误!");
jedis.close();
return;
}
System.out.println("migrate slot " + slotRange + " ...");
for (int slot = slotBegin; slot <= slotEnd; slot++) {
Jedis sourceNode = slot2Host.get(slot);
String source_node_id = slot2NodeId.get(slot);
if (null == source_node_id) {
System.out.println(slot + " source_node_id not found");
continue;
}
if (source_node_id.equals(destination_node_id)) {//slot already lives on the destination
continue;
}
destinationNode.clusterSetSlotImporting(slot, source_node_id);//step 1: must run before step 2
sourceNode.clusterSetSlotMigrating(slot, destination_node_id);//step 2
List<String> keysInSlot;
do {
keysInSlot = sourceNode.clusterGetKeysInSlot(slot, migratCount);
for (String key : keysInSlot) {
try {
sourceNode.migrate(destinationHost, destinationPort, key, 0, timeout);////step 3
} catch (RuntimeException e) {
String msg = e.getMessage();
e.printStackTrace();
if (msg.contains("BUSYKEY Target key name already exists")) {
System.out.println(key + " BUSYKEY Target key name already exists");
continue;
}
System.out.println("迁移终止,当前slot:" + slot + " key:" + key);
return;
}
}
} while (keysInSlot.size() != 0);
try {
//if the destination node has been demoted to a slave, the slot would be lost
String checkNodes = destinationNode.clusterNodes();
if (!checkNodes.contains("myself,master")) {
System.out.println("目标节点不是主节点,迁移终止,当前slot位置:" + slot);
return;
}
sourceNode.clusterSetSlotNode(slot, destination_node_id);//step 4 source or destination
destinationNode.clusterSetSlotNode(slot, destination_node_id);//
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (msg.contains("I still hold keys")) {
slot--; //under heavy writes new keys may have landed in this slot; retry it, otherwise data would be lost
System.out.println(slot + ", I still hold keys, try again");
continue;
} else {
e.printStackTrace();
System.out.println("有节点失败,迁移终止,当前slot位置:" + slot);
return;
}
} catch (Throwable e) {
e.printStackTrace();
System.out.println("有节点失败,迁移终止,当前slot位置:" + slot);
return;
}
for (Jedis notify : clusterHostList) {
try {
notify.clusterSetSlotNode(slot, destination_node_id);
} catch (Throwable e) {
e.printStackTrace();
System.out.println("有节点失败,迁移终止,当前slot位置:" + slot);
return;
}
}
//require full agreement: if either migrating node fails before the slot info propagates, the slot could be lost
for (Jedis notify : clusterHostList) {
int waitCount = 0;
boolean isSync = false;
String nodeCheck = null;
do {
try {
nodeCheck = notify.clusterInfo();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
e.printStackTrace();
System.out.println("有节点失败,迁移终止,当前slot位置:" + slot);
}
isSync = nodeCheck.contains("cluster_slots_ok:16384");
if (!isSync) {
waitCount++;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("wait conf sync " + waitCount + " ...");
}
} while (!isSync);
}
if (slot % 1 == 0) {//progress log; migrating tens of millions of keys is slow
System.out.println("migrate slot " + slot + " done");
}
}
System.out.println("migrate slot " + slotRange + " done");
}
destinationNode.close();
jedis.close();
}
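/*
 * Slot migration above follows the standard Redis Cluster handshake for every slot:
 *   1. CLUSTER SETSLOT <slot> IMPORTING <source-id> on the destination
 *   2. CLUSTER SETSLOT <slot> MIGRATING <dest-id>   on the source
 *   3. CLUSTER GETKEYSINSLOT + MIGRATE in batches of migratCount until the slot is empty
 *   4. CLUSTER SETSLOT <slot> NODE <dest-id>        on source, destination and every other node
 * After each slot it waits until every node reports cluster_slots_ok:16384 before moving on.
 */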
private void create(RedisClusterManager rcm, String[] master2slave) {
String[] masterHost = master2slave[0].split("->");
String[] hostInfo = masterHost[0].split(":");
String host = hostInfo[0];
int port = Integer.parseInt(hostInfo[1]);
//meet
for (int i = 0; i < master2slave.length; i++) {
String[] hostsInfo = master2slave[i].split("->");
if (hostsInfo.length == 2) {
Jedis clusterNode = connect(hostsInfo[0]);
Jedis slaveNode = connect(hostsInfo[1]);
try {
clusterNode.clusterMeet(host, port);
clusterNode.close();
slaveNode.clusterMeet(host, port);
slaveNode.close();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
System.out.println(hostsInfo[1] + " clusterMeet connect error!");
}
} else {
System.out.println("请输入要添加的节点及主节点列表");
}
}
System.out.println("cluster send meet all!");
//set slot
int slot = 16384 / master2slave.length;
int slotIndex = 0;
for (int i = 0; i < master2slave.length; i++) {
String[] hostsInfo = master2slave[i].split("->");
Jedis clusterNode = connect(hostsInfo[0]);
int thisBegin = slotIndex;
for (; slotIndex <= (i + 1) * slot; slotIndex++) {
try {
clusterNode.clusterAddSlots(slotIndex);
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (msg.contains("is already busy")) {
} else {
e.printStackTrace();
}
} catch (redis.clients.jedis.exceptions.JedisConnectionException e2) {
System.out.println(hostsInfo[0] + " clusterAddSlots connect error!");
}
}
if (i == master2slave.length - 1) {//the last node takes any remaining slots
for (; slotIndex < 16384; slotIndex++) {
try {
clusterNode.clusterAddSlots(slotIndex);
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (msg.contains("is already busy")) {
} else {
e.printStackTrace();
}
} catch (redis.clients.jedis.exceptions.JedisConnectionException e2) {
System.out.println(hostsInfo[0] + " clusterAddSlots connect error!");
}
}
}
System.out.println(hostsInfo[0] + " set slots " + thisBegin + "-" + (slotIndex - 1));
clusterNode.close();
}
//set slave
for (int i = 0; i < master2slave.length; i++) {
String[] hostsInfo = master2slave[i].split("->");
rcm.addSlave(hostsInfo[0], hostsInfo[1], true);
}
}
private Jedis connect(String hostPort) {
String[] hostInfo = hostPort.split(":");
return new Jedis(hostInfo[0], Integer.parseInt(hostInfo[1]));
}
private void failOver(String slaveNode) throws Exception {
String[] masterHostInfo = slaveNode.split(":");
Jedis fixNode = new Jedis(masterHostInfo[0], Integer.parseInt(masterHostInfo[1]));
try {
String clusterNode;
int tryCount = 0;
do {
fixNode.clusterFailover();//does not always take effect on the first attempt
Thread.sleep(500);
tryCount++;
clusterNode = fixNode.clusterNodes();
if (tryCount > 1) {
System.out.println(slaveNode + " tryCount:" + tryCount);
}
} while (clusterNode.contains("myself,slave"));//保证踢成功
System.out.println(slaveNode + " failover success!");
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (msg.contains("CLUSTER FAILOVER to a slave")) {
System.out.println(slaveNode + " is master, You should send CLUSTER FAILOVER to a slave");
}
} catch (redis.clients.jedis.exceptions.JedisConnectionException e2) {
String msg = e2.getMessage();
if (msg.contains("connect timed out")) {
System.out.println(slaveNode + " : connect timed out");
}
}
fixNode.close();
}
private void fixSlotStable() {
Jedis fixNode = new Jedis(REDIS_HOST, REDIS_PORT);
byte[] coverSlot = new byte[16384];
List<Object> slotInfos = fixNode.clusterSlots();
for (Object slotInfo : slotInfos) {//collect covered slots while clearing any importing/migrating state
@SuppressWarnings("unchecked")
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
for (int i = (int) begin; i <= end; i++) {
coverSlot[i] = 1;
fixNode.clusterSetSlotStable(i);//Clear any importing / migrating state from hash slot.
}
}
fixNode.close();
}
/**
* Use the given master node to claim any slots not covered by the cluster.
*
* @param masterNode
*/
@SuppressWarnings("unchecked")
private void fixSlotCover(String masterNode) {
String[] masterHostInfo = masterNode.split(":");
Jedis fixNode = new Jedis(masterHostInfo[0], Integer.parseInt(masterHostInfo[1]));
byte[] coverSlot = new byte[16384];
List<Object> slotInfos = fixNode.clusterSlots();
for (Object slotInfo : slotInfos) {//collect the slots that are currently covered
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
// String host = new String((byte[]) hostInfo.get(0));
// long port = (long) hostInfo.get(1);
for (int i = (int) begin; i <= end; i++) {
coverSlot[i] = 1;
//fixNode.clusterSetSlotStable(i);//Clear any importing / migrating state from hash slot.
}
}
		int begin = -1;
		for (int i = 0; i < 16384; i++) {
			if (coverSlot[i] == 0) {
				fixNode.clusterAddSlots(i);
			}
			if (coverSlot[i] == 0 && begin == -1) {
				begin = i;
			} else if (coverSlot[i] == 1 && begin > -1) {
				System.out.println("cluster_slots_fixed:" + begin + "-" + (i - 1));
				begin = -1;
			}
		}
		if (begin > -1) {// the uncovered range reached the last slot
			System.out.println("cluster_slots_fixed:" + begin + "-16383");
		}
fixNode.close();
}
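	/** Benchmark worker: each thread writes (dataCount - offset) string keys into the cluster and reports throughput. */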
static class TestClass implements Runnable {
List<JSONObject> benckmarkData = new ArrayList<JSONObject>();
		private int threadNum;
		private String key;
		private String value;
		private long offset;
		private long dataCount;
		public TestClass(int threadNum, String key, String value, long offset, long dataCount) {
			this.key = key;
			this.value = value;// keep the payload so the benchmark actually writes it
			this.offset = offset;
			this.dataCount = dataCount;
			this.threadNum = threadNum;
		}
public void run() {
long beginTime = System.currentTimeMillis(), lastCountTime = System.currentTimeMillis();
long lastCount = 0;
long lastBreakTime = 0;
int errorCount = 0;
			for (long i = offset; i < dataCount; i++) {
				boolean success = true;
				try {
					cluster.set(key + "_" + i, value);// the target node may be down while the benchmark runs
				} catch (Exception e) {
					success = false;
					errorCount++;
					if (lastBreakTime == 0) {
						lastBreakTime = System.currentTimeMillis();
					}
					System.out.println("errorCount:" + errorCount);
				}
				if (success && lastBreakTime > 0) {// first successful write after a failure window
					System.out
							.println(threadNum + " reconnect use time:" + (System.currentTimeMillis() - lastBreakTime));
					lastBreakTime = 0;
				}
if (i % 5000 == 0) {
long useTime = System.currentTimeMillis() - lastCountTime;
System.out.println(threadNum + " set total:" + i + " speed:"
+ ((i - lastCount) / (useTime / 1000.0)));
lastCountTime = System.currentTimeMillis();
lastCount = i;
}
}
long useTime = System.currentTimeMillis() - beginTime;
System.out.println(threadNum + " set use time:" + useTime + " speed:"
+ ((dataCount - offset) / (useTime / 1000.0)));
}
}
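	/** Print command-line usage for all supported sub-commands. */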
private static void printHelp() {
System.out.println("java -jar redis-cluster-util-jar-with-dependencies.jar arg1 arg2 ...");
System.out.println("add-master \t:[host:port;host2:port2]add master list");
System.out.println("add-slave \t:[maser->slave;master2->slave2;...]master->slave");
System.out.println("analyze \t:" + MonitorUtil.helpInfo);
System.out.println("bakup-node \t:[file path]file path to save");
System.out
.println("benchmark \t:java -cp redis-cluster-util-jar-with-dependencies.jar com.jumei.util.Benchmark key value offset limit threadCount [all|set|get]");
System.out.println("check \t:check cluster status");
System.out.println("count \t:[keyPattern] count key count use keyPattern");
System.out.println("create \t:[maser->slave;master2->slave2;...] create cluster");
System.out.println("del \t:[key] del one key");
System.out.println("dels \t:[keyPattern][delKeyFileSavePath] del use keyPattern");
System.out.println("del-node \t:[host:port]");
System.out.println("del-node-id \t:[node-id]del node use id");
System.out.println("export \t:[keyPattern][outputFilePath] use * to export all");
System.out.println("exporth \t:[keyPattern][outputFilePath] export one host data, use * to export all");
System.out.println("export-keys \t:[key1,key2][outputFilePath]");
System.out.println("export-keys-file \t:[input keys file][outputFilePath]");
System.out.println("failover \t:[host:port;host2:port2] slave failover");
System.out.println("fix-slot-cover \t:[host:port] use one node to fix uncovered slot ");
System.out.println("fix-slot-stable \t:clear any importing / migrating state from hash slot");
System.out.println("flush \t:use flushall to clean cluster all data (be careful!)");
System.out.println("get \t:[key] get a string type value");
		System.out
				.println("import \t:[keyPattern][importFilePath] import keys that do not already exist (lists are merged), use * to import all");
System.out.println("info \t:(ops,input,output,ram) query db info ");
System.out.println("keys \t:query use keyPattern");
System.out.println("keysize :count cluster all key");
System.out.println("monitor :[sleep second] monitor cluster status");
System.out.println("querys \t:query use pattern");
System.out.println("reshard \t:[host:port](master) [1-1024;1025-2048](slot range)");
System.out.println("raminfo \t:[host:port]default all node raminfo analysis");
System.out.println("set \t:[key][value] set a string type value");
System.out.println("safe-delete \t:[keyPre][delCount]");
System.out.println("others \t:use redis-cli to execute others command(linux only)");
}
	/**
	 * Execute a shell command (skipped on Windows), streaming its stdout and stderr.
	 *
	 * @param cmd the command line to run
	 */
public static void executeCmd(String cmd) {
if (null != cmd) {
System.out.println("exec cmd: " + cmd);
if (!SystemConf.isWindos) {
Runtime rt = Runtime.getRuntime();
try {
long beginTime = System.currentTimeMillis();
Process process = rt.exec(cmd);
StreamGobbler errorGobbler = new StreamGobbler(process.getErrorStream(), "ERROR");
StreamGobbler outputGobbler = new StreamGobbler(process.getInputStream(), "INFO");
errorGobbler.start();
outputGobbler.start();
System.out.println(cmd + " useTime:" + (System.currentTimeMillis() - beginTime));
while (errorGobbler.isAlive() || outputGobbler.isAlive()) {
Thread.sleep(1);
}
process.waitFor();
process.destroy();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
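	/** Run FLUSHALL on every node of the cluster; read-only (slave) nodes are skipped silently. */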
private void flushall() {
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
Jedis jedis = entry.getValue().getResource();
try {
jedis.flushAll();
System.out.println(entry.getKey() + " flushAll success");
} catch (Exception e) {
String msg = e.getMessage();
if (msg.contains("Read timed out")) {
System.out.println(entry.getKey() + " flushAll fail");
} else if (msg.contains("READONLY")) {//slave
} else {
e.printStackTrace();
}
}
}
}
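	/**
	 * Check cluster health: slave coverage for every master, failed nodes, and hash-slot coverage.
	 */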
@SuppressWarnings("unchecked")
private void check() {
Jedis clusterMaster = new Jedis(REDIS_HOST, REDIS_PORT, 10000);
String nodes = clusterMaster.clusterNodes();
Map<String, String> slave2host = new TreeMap<String, String>();
Map<String, String> host2slave = new TreeMap<String, String>();
Map<String, String> master2host = new TreeMap<String, String>();
Map<String, String> host2master = new TreeMap<String, String>();
Map<String, String> master2slave = new TreeMap<String, String>();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String type = nodeInfo[2];
if (type.contains("master")) {
master2host.put(nodeInfo[0], nodeInfo[1]);
host2master.put(nodeInfo[1], nodeInfo[0]);
master2slave.put(nodeInfo[1], "warn");
}
}
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String type = nodeInfo[2];
if (type.contains("slave")) {
slave2host.put(nodeInfo[0], nodeInfo[1]);
host2slave.put(nodeInfo[1], nodeInfo[0]);
String masterHost = master2host.get(nodeInfo[3]);
if (null != masterHost) {
master2slave.put(masterHost, nodeInfo[1]);
} else {
System.out.println("master not found:" + nodeInfo[1]);
}
}
}
Iterator<Entry<String, String>> it = master2slave.entrySet().iterator();
StringBuffer slaveCheck = new StringBuffer("==== slave status check info ====");
boolean slaveCheckErrorFind = false;
while (it.hasNext()) {
Entry<String, String> entry = it.next();
String key = entry.getKey();
String value = entry.getValue();
if ("warn".equals(value)) {
slaveCheckErrorFind = true;
slaveCheck.append("\r\n" + entry.getKey() + " no slave");
continue;
}
String[] masterHostInfo = key.split(":");
String[] slaveHostInfo = value.split(":");
			if (masterHostInfo[0].equals(slaveHostInfo[0]) || !masterHostInfo[1].equals(slaveHostInfo[1])) {// master and slave share a host, or their ports differ
slaveCheck.append("\r\n" + entry.getKey() + " slave ");
if (":0".equals(value)) {
slaveCheck.append("disconnected");
} else {
slaveCheck.append(value + " warn");
}
slaveCheckErrorFind = true;
} else {
slaveCheck.append("\r\n" + entry.getKey() + "->" + value);
}
}
if (slaveCheckErrorFind) {
slaveCheck.insert("==== slave status check info ====".length(), "error");
} else {
slaveCheck.insert("==== slave status check info ====".length(), "ok");
}
System.out.println(slaveCheck);
StringBuffer nodeFailCheck = new StringBuffer("==== node status check info ====");
boolean failCheckFind = false;
for (String node : nodes.split("\n")) {
if (node.contains("fail") || node.contains(":0")) {
nodeFailCheck.append("\r\n" + node);
failCheckFind = true;
}
}
if (!failCheckFind) {
nodeFailCheck.append("ok");
}
System.out.println(nodeFailCheck);
String clusterInf = clusterMaster.clusterInfo();
if (clusterInf.contains("cluster_state:ok") && clusterInf.contains("cluster_slots_ok:16384")) {
System.out.println("==== cluster info ====OK");
} else {
System.out.println("==== cluster info ====");
List<Object> slotInfos = clusterMaster.clusterSlots();
byte[] coverSlot = new byte[16384];
			for (Object slotInfo : slotInfos) {// record which slots are covered
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
for (int i = (int) begin; i <= end; i++) {
coverSlot[i] = 1;
}
}
int begin = -1;
for (int i = 0; i < 16384; i++) {
/*if (coverSlot[i] == 0) {
System.out.println("cluster_slots_lost:" + i);
}*/
if (coverSlot[i] == 0 && begin == -1) {
					if (i == 16383 || coverSlot[i + 1] == 1) {// only a single slot is missing
System.out.println("cluster_slots_lost:" + i);
} else {
begin = i;
}
} else if ((coverSlot[i] == 1 && begin > -1)) {
System.out.println("cluster_slots_lost_range:" + begin + "-" + i);
begin = -1;
}
}
System.out.println(clusterInf);
}
clusterMaster.close();
}
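	/**
	 * Add one or more empty master nodes to the cluster via CLUSTER MEET, refusing nodes that already carry state.
	 */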
private void addMaster(String[] args) {
Jedis clusterNode = new Jedis(REDIS_HOST, REDIS_PORT);
List<Jedis> addHostList = new ArrayList<Jedis>();
String nodes = null;
String[] addMasterNodes = trim(args[1]).split(";");
for (String addMasterNode : addMasterNodes) {
String[] addHostInfo = addMasterNode.split(":");
Jedis addNode = new Jedis(addHostInfo[0], Integer.parseInt(addHostInfo[1]));
try {
nodes = addNode.clusterNodes();
addHostList.add(addNode);
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
System.out.println(addMasterNode + " connect error!");
continue;
}
int nodeCount = 0;
String addNodeId = null;
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
if (node.contains("myself")) {
addNodeId = nodeInfo[0];
}
nodeCount++;
}
if (null == addNodeId) {
System.out.println("nodeId not found!");
return;
}
if (nodeCount > 1) {
System.out.println(addMasterNode + " this is not new node,use cmd to remove old node info");
System.out.println("cd /home/redis/" + addHostInfo[1]
+ " && rm -f dump.rdb appendonly.aof nodes.conf redis.log && service redis-node-"
+ addHostInfo[1] + " restart");
return;
}
}
for (Jedis addHost : addHostList) {
boolean meetSeccuss = false;
addHost.clusterMeet(REDIS_HOST, REDIS_PORT);
while (!meetSeccuss) {
try {
					Thread.sleep(100);// the meet usually propagates within about 100ms
} catch (InterruptedException e) {
e.printStackTrace();
}
nodes = clusterNode.clusterNodes();
				if (nodes.contains(getJedisHostInfo(addHost))) {// check whether the cluster now lists this node
					meetSeccuss = true;
				}
				if (!meetSeccuss) {
					System.out.println(getJedisHostInfo(addHost) + " wait meet to succeed ...");
				} else {
					System.out.println(getJedisHostInfo(addHost) + " add master succeeded!");
				}
}
}
clusterNode.close();
}
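	/**
	 * Attach a slave to the given master: CLUSTER MEET, then CLUSTER REPLICATE, and wait until every
	 * reachable node's view of the cluster contains the new slave.
	 */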
private void addSlave(String masterNode, String slaveNode, boolean isCreateCluster) {
String[] masterHostInfo = masterNode.split(":");
Jedis master = new Jedis(masterHostInfo[0], Integer.parseInt(masterHostInfo[1]));
String nodes = master.clusterNodes();
String masterNodeId = null;
List<Jedis> clusterHostList = new ArrayList<Jedis>();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String[] hostInfo = nodeInfo[1].split(":");
if (masterNode.equals(nodeInfo[1])) {
masterNodeId = nodeInfo[0];
}
int port = Integer.parseInt(hostInfo[1]);
if (port > 0) {
clusterHostList.add(new Jedis(hostInfo[0], port));
} else {
//System.out.println("not connected:" + node);//可能存在没有连上的节点
}
}
String[] addHostInfo = slaveNode.split(":");
Jedis slave = new Jedis(addHostInfo[0], Integer.parseInt(addHostInfo[1]));
nodes = slave.clusterNodes();
int nodeCount = 0;
String addNodeId = null;
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
if (node.contains("myself")) {
addNodeId = nodeInfo[0];
}
nodeCount++;
}
if (null == addNodeId) {
System.out.println("nodeId not found");
slave.close();
master.close();
return;
}
if (nodeCount > 1 && !isCreateCluster) {
System.out.println(slaveNode + " this is not new node,use cmd to remove old node info");
System.out.println("cd /home/redis/" + addHostInfo[1]
+ " && rm -f dump.rdb appendonly.aof nodes.conf redis.log && service redis-node-" + addHostInfo[1]
+ " restart");
slave.close();
master.close();
return;
}
if (null == masterNodeId) {
System.out.println("not found master node with host:" + masterNode);
slave.close();
master.close();
return;
}
slave.clusterMeet(masterHostInfo[0], Integer.parseInt(masterHostInfo[1]));
boolean meetSeccuss = false;
while (!meetSeccuss) {
nodes = slave.clusterNodes();
if (nodes.contains(masterNodeId)) {
meetSeccuss = true;
}
if (!meetSeccuss) {
System.out.println(masterNode + " wait slave meet success ...");
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
try {
slave.clusterReplicate(masterNodeId);
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
String print = "only replicate a master, not a slave";
if (msg.contains(print)) {
System.out.println(masterNode + " " + print);
} else {
e.printStackTrace();
}
}
//check
for (Jedis host : clusterHostList) {
boolean isAddSuccess = false;
do {
String checkNodes = null;
try {
checkNodes = host.clusterNodes();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
System.out.println(getJedisHostInfo(host) + " check slave connect error");
continue;
}
for (String node : checkNodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
if (slaveNode.equals(nodeInfo[1])) {
isAddSuccess = true;
break;
}
}
if (!isAddSuccess) {
System.out.println(getJedisHostInfo(host) + " wait nodes.conf sync ...");
try {
Thread.sleep(300);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (!isAddSuccess);
}
master.close();
slave.close();
}
	/**
	 * Delete a slave node, or a master node that owns no slots; failed (offline) nodes can also be removed.
	 *
	 * @param delNode host:port of the node to delete
	 */
@SuppressWarnings("unchecked")
private void delNode(String delNode) {
Jedis checkMaster = new Jedis(REDIS_HOST, REDIS_PORT);
String clusterNodes = checkMaster.clusterNodes();
if (!clusterNodes.contains(delNode)) {
checkMaster.close();
System.out.println(delNode + " not in cluster!");
return;
}
if (!":0".equals(delNode)) {//掉线主机直接删除 ,TODO有bug
List<Object> slotInfos = checkMaster.clusterSlots();
for (Object slotInfo : slotInfos) {//检查删除节点是否含有slot
List<Object> slotInfoList = (List<Object>) slotInfo;
for (int i = 2; i < slotInfoList.size(); i++) {
List<Object> slotHostInfo = (List<Object>) slotInfoList.get(i);
String host = new String((byte[]) slotHostInfo.get(0));
long port = (Long) slotHostInfo.get(1);
String hostPort = host + ":" + port;
String isMasterCheck = hostPort + " master";
					if ((hostPort.equals(delNode) && clusterNodes.contains(isMasterCheck))) {// a master that still owns slots cannot be deleted
						System.out.println(hostPort + " del fail, still owns slots " + slotInfoList.get(0) + "-"
								+ slotInfoList.get(1));
checkMaster.close();
return;
}
}
}
}
		List<String> delNodeIds = new ArrayList<String>();// offline nodes show up as host:0, so several node ids may match
List<Jedis> clusterHostList = new ArrayList<Jedis>();
for (String node : clusterNodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String[] hostInfo = nodeInfo[1].split(":");
if (delNode.equals(nodeInfo[1])) {
delNodeIds.add(nodeInfo[0]);
} else {
clusterHostList.add(new Jedis(hostInfo[0], Integer.parseInt(hostInfo[1])));
}
}
if (delNodeIds.size() > 0) {
for (String delNodeId : delNodeIds) {
for (Jedis host : clusterHostList) {
String hostInfo = getJedisHostInfo(host);
try {
host.clusterForget(delNodeId);
System.out.println(hostInfo + " send forget sucess");
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (null != msg && msg.contains("Unknown node")) {
System.out.println(hostInfo + " not found");
} else {
System.out.println(hostInfo + " send forget fail");
e.printStackTrace();
}
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
String msg = e.getMessage();
if (null != msg && msg.contains("Connection refused")) {
System.out.println(hostInfo + " 主机连不上,请手动清空除该节点对应node配置,否则当前主机重新加入集群进会带入被踢出的节点信息!");
} else {
e.printStackTrace();
}
}
}
//check
for (Jedis host : clusterHostList) {
boolean isDelSuccess = false;
while (!isDelSuccess) {
String checkNodes = checkMaster.clusterNodes();
if (checkNodes.contains(delNodeId)) {
System.out.println(getJedisHostInfo(host) + " wait delete success ...");
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
isDelSuccess = true;
}
}
}
String[] delHostInfo = delNode.split(":");
int port = Integer.parseInt(delHostInfo[1]);
if (port > 0) {
try {
Jedis jedis = new Jedis(delHostInfo[0], Integer.parseInt(delHostInfo[1]));
jedis.shutdown();
System.out.println(delNode + " has shutdown!");
jedis.close();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
System.out.println(delNode + ",无法连接,请手动进行关闭!");
}
}
System.out.println(delNode + " delete success, please remove nodes.conf file!");
}
}
checkMaster.close();
}
private String getJedisHostInfo(Jedis host) {
return host.getClient().getHost() + ":" + host.getClient().getPort();
}
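	/** Handle simple del / set / get commands against the cluster through a short-lived JedisCluster. */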
private void opt(String[] args) {
JedisCluster jedisCluster;
Set<HostAndPort> jedisClusterNodes;
JedisPoolConfig pool;
jedisClusterNodes = new HashSet<HostAndPort>();
jedisClusterNodes.add(new HostAndPort(REDIS_HOST, REDIS_PORT));
pool = new JedisPoolConfig();
pool.setMaxTotal(100);
jedisCluster = new JedisCluster(jedisClusterNodes, pool);
long beginTime = System.currentTimeMillis();
if ("del".equals(args[0])) {
for (int i = 1; i < args.length; i++) {
jedisCluster.del(args[i]);
}
} else if ("set".equals(args[0])) {
jedisCluster.set(args[1], args[2]);
} else if ("get".equals(args[0])) {
for (int i = 1; i < args.length; i++) {
System.out.println(args[i] + "->" + jedisCluster.get(args[i]));
}
}
System.out.println("opt useTime->" + ((System.currentTimeMillis() - beginTime)) + "ms ");
try {
jedisCluster.close();
} catch (IOException e) {
e.printStackTrace();
}
}
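	/** Look up the old (pre-migration) Redis instance that owned the hash slot of the given key. */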
public Jedis getOldRedis(String key) {
return oldRedisMap.get(JedisClusterCRC16.getCRC16(key) % 16384);
}
	/**
	 * Restore the attention (following) and fans sorted sets of the given uids from the old Redis instances.
	 */
private void followRestoreByUids(String uids) {
String[] uidsArray = uids.split(",");
String KEY_USER_ATTENTION_ZSET = "u_a_";
String KEY_USER_FANS_ZSET = "u_f_";
for (String uid : uidsArray) {
			if (uid.length() == 0) {
				continue;// skip invalid uid
			}
			try {
				// restore the attention (following) set
				String keyAttention = KEY_USER_ATTENTION_ZSET + uid;// this user's attention set
int slot = JedisClusterCRC16.getCRC16(keyAttention) % 16384;
Jedis oldJedis = oldRedisMap.get(slot);
Set<Tuple> attentionUids = oldJedis.zrangeWithScores(keyAttention, 0, -1);
if (attentionUids != null && attentionUids.size() > 0) {
for (Tuple t : attentionUids) {
						String attentionUid = t.getElement(); // uid of the followed user
						double score = t.getScore();
						cluster.zadd(keyAttention, score, attentionUid);
						cluster.zadd(KEY_USER_FANS_ZSET + attentionUid, score, uid);// add this user to the followed user's fans set
}
}
				// restore the fans set
				String keyFans = KEY_USER_FANS_ZSET + uid;// this user's fans set
int slot2222 = JedisClusterCRC16.getCRC16(keyFans) % 16384;
Jedis oldJedis2222 = oldRedisMap.get(slot2222);
Set<Tuple> fansUids = oldJedis2222.zrangeWithScores(keyFans, 0, -1);
if (fansUids != null && fansUids.size() > 0) {
for (Tuple t : fansUids) {
						String fansUid = t.getElement(); // uid of the fan
						double score = t.getScore();
						cluster.zadd(keyFans, score, fansUid);
						cluster.zadd(KEY_USER_ATTENTION_ZSET + fansUid, score, uid);// add this user to the fan's attention set
}
}
} catch (Exception e) {
System.out.println("followRestoreByUids 异常,当前uid:" + uid);
e.printStackTrace();
}
}
}
	/**
	 * Restore the praise (like) member sets of all posts belonging to the given uids.
	 */
private void restoreShowPraise(String uids) {
String[] uidsArray = uids.split(",");
String KEY_USER_SHOW_ZSET = "u_s_";//普通帖
String KEY_USER_SHOW_VIDEO_ZSET = "u_s_v_";//视频帖
String KEY_USER_COUNSEL_SHOW_ZSET = "u_counsel_s_";//专栏帖
String KEY_SHOW_PRAISE_SET = "s_p_";//帖子点赞人队列
for (String uid : uidsArray) {
			if (uid.length() == 0) {// skip invalid uid
continue;
}
try {
String key = KEY_USER_SHOW_ZSET + uid;
				// walk normal posts
Set<String> showIdsNormal = oldRedisMap.get(JedisClusterCRC16.getCRC16(key) % 16384).zrange(key, 0, -1);
if (showIdsNormal != null && showIdsNormal.size() > 0) {
for (String showId : showIdsNormal) {
Set<Tuple> tuplesPraise = getOldRedis(KEY_SHOW_PRAISE_SET + showId).zrangeWithScores(
KEY_SHOW_PRAISE_SET + showId, 0, -1);
if (tuplesPraise != null && tuplesPraise.size() > 0) {
for (Tuple t : tuplesPraise) {
String praiseUserId = t.getElement();
double praiseTime = t.getScore();
cluster.zadd(KEY_SHOW_PRAISE_SET + showId, praiseTime, praiseUserId);
}
}
}
}
				// walk video posts
Set<String> showIdsVideo = getOldRedis(KEY_USER_SHOW_VIDEO_ZSET + uid).zrange(
KEY_USER_SHOW_VIDEO_ZSET + uid, 0, -1);
if (showIdsVideo != null && showIdsVideo.size() > 0) {
for (String showId : showIdsVideo) {
Set<Tuple> tuplesPraise = getOldRedis(KEY_SHOW_PRAISE_SET + showId).zrangeWithScores(
KEY_SHOW_PRAISE_SET + showId, 0, -1);
if (tuplesPraise != null && tuplesPraise.size() > 0) {
for (Tuple t : tuplesPraise) {
String praiseUserId = t.getElement();
double praiseTime = t.getScore();
cluster.zadd(KEY_SHOW_PRAISE_SET + showId, praiseTime, praiseUserId);
}
}
}
}
				// walk column posts
Set<String> showIdsCounsel = getOldRedis(KEY_USER_COUNSEL_SHOW_ZSET + uid).zrange(
KEY_USER_COUNSEL_SHOW_ZSET + uid, 0, -1);
if (showIdsCounsel != null && showIdsCounsel.size() > 0) {
for (String showId : showIdsCounsel) {
Set<Tuple> tuplesPraise = getOldRedis(KEY_SHOW_PRAISE_SET + showId).zrangeWithScores(
KEY_SHOW_PRAISE_SET + showId, 0, -1);
if (tuplesPraise != null && tuplesPraise.size() > 0) {
for (Tuple t : tuplesPraise) {
String praiseUserId = t.getElement();
double praiseTime = t.getScore();
cluster.zadd(KEY_SHOW_PRAISE_SET + showId, praiseTime, praiseUserId);
}
}
}
}
} catch (Exception e) {
System.out.println("程序出现异常!当前uid:" + uid + ",异常信息:" + e.getMessage());
}
}
}
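	/** Slot -> old (pre-migration) Redis instance mapping, loaded from oldRedisSlot2Host.txt under SystemConf.confFileDir. */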
static Map<Integer, String> oldRedisSlot2Host = new HashMap<Integer, String>();
static Map<Integer, Jedis> oldRedisMap = new HashMap<Integer, Jedis>();
static {
String host = null;
Integer port = null;
try {
File file = new File(SystemConf.confFileDir + "/oldRedisSlot2Host.txt");
if (file.isFile()) {
BufferedReader br = new BufferedReader(new FileReader(file));
String data;
				//line format: host:port:beginSlot-endSlot (e.g. 10.0.228.31:29006:5243-5897)
while ((data = br.readLine()) != null) {
String[] info = data.split(":");
if (info.length == 3) {
host = info[0];
port = Integer.valueOf(info[1]);
Jedis jedis = new Jedis(host, port);
						jedis.info();// ping the old node to verify it is reachable
String[] soltInfo = info[2].split("-");
int begin = Integer.valueOf(soltInfo[0]);
int end = Integer.valueOf(soltInfo[1]);
for (int i = begin; i <= end; i++) {
oldRedisMap.put(i, jedis);
}
}
}
br.close();
}
} catch (Exception e) {
e.printStackTrace();
System.out.println("nodeError host:" + host + " port:" + port);
}
}
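	/**
	 * Restore selected fields of the user hash (u_<uid>) from the old Redis instances: numeric counters are
	 * summed with the current values, and vip / verification info is copied only when missing in the cluster.
	 */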
private void restoreUserHash(String uids) {
String[] uidsArray = uids.split(",");
for (String uid : uidsArray) {
			if (uid.length() == 0) {// skip invalid uid
continue;
}
String key = "u_" + uid;
int slot = JedisClusterCRC16.getCRC16(key) % 16384;
Jedis oldJedis = oldRedisMap.get(slot);
			Map<String, String> oldData = oldJedis.hgetAll(key);// data from the old instance
			if (oldData != null) {
				Map<String, String> newData = cluster.hgetAll(key);// data currently in the cluster
				restoreUserHashSetData(key, "copper", oldData, newData);// restore copper coins
				restoreUserHashSetData(key, "gold", oldData, newData);// restore gold coins
				restoreUserHashSetData(key, "live_empirical_value", oldData, newData);// restore live experience value
				restoreUserHashSetData(key, "praise_count", oldData, newData);// restore praise count
				// restore vip
				String oldVip = oldData.get("vip");
				String newVip = newData.get("vip");
				if (null == newVip || "0".equals(newVip)) {// only restore when the current value is missing or the default
if (oldVip != null && !"".equals(oldVip) && !"0".equals(oldVip)) {
Map<String, String> userHash = new HashMap<String, String>();
userHash.put("vip", oldVip);
cluster.hmset(key, userHash);
}
}
				// restore verification info
String oldRecommend_desc = oldData.get("recommend_desc");
String newRecommend_desc = newData.get("recommend_desc");
if (null == newRecommend_desc || newRecommend_desc.length() == 0) {
if (oldRecommend_desc != null && oldRecommend_desc.length() > 0) {
Map<String, String> userHash = new HashMap<String, String>();
userHash.put("recommend_desc", oldRecommend_desc);
cluster.hmset(key, userHash);
}
}
}
}
}
	/**
	 * Merge one numeric field: add the old value to the current one (if any) and write the sum back.
	 * Used for copper, gold, experience value and praise count.
	 */
private void restoreUserHashSetData(String key, String propertyName, Map<String, String> oldData,
Map<String, String> newData) {
String propertyOldData = oldData.get(propertyName);
if (propertyOldData != null && !"".equals(propertyOldData)) {
String properNewData = newData.get(propertyName);
long total = Long.valueOf(propertyOldData);
if (properNewData != null && !"".equals(properNewData)) {
total += Long.valueOf(properNewData);
}
Map<String, String> userHash = new HashMap<String, String>();
userHash.put(propertyName, total + "");
cluster.hmset(key, userHash);
}
}
}
| redis-cluster-manager/src/main/java/com/huit/util/RedisClusterManager.java | package com.huit.util;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.bson.Document;
import redis.clients.jedis.DebugParams;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.ScanParams;
import redis.clients.jedis.ScanResult;
import redis.clients.jedis.Tuple;
import redis.clients.jedis.exceptions.JedisConnectionException;
import redis.clients.util.JedisClusterCRC16;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.mongodb.MongoClient;
import com.mongodb.WriteConcern;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
/**
 * Redis cluster management tool.
 *
 * @author huit
 */
public class RedisClusterManager {
private static String REDIS_HOST = SystemConf.get("REDIS_HOST");
private static int REDIS_PORT = Integer.parseInt(SystemConf.get("REDIS_PORT"));
private static JedisCluster cluster;
static final int DEFAULT_TIMEOUT = 2000;
	static final int MAX_REDIRECTIONS = 25;// should be at least the number of master nodes
static ScanParams sp = new ScanParams();
static {
sp.count(10000);
}
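	/** Build the shared JedisCluster connection using the configured entry node and pool settings. */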
private static void connectCluser() {
Set<HostAndPort> nodes = new HashSet<HostAndPort>();
nodes.add(new HostAndPort(REDIS_HOST, REDIS_PORT));
JedisPoolConfig poolConfig = new JedisPoolConfig();
poolConfig.setMaxTotal(1000);
poolConfig.setMaxIdle(10);
poolConfig.setMinIdle(1);
poolConfig.setMaxWaitMillis(30000);
poolConfig.setTestOnBorrow(true);
poolConfig.setTestOnReturn(true);
poolConfig.setTestWhileIdle(true);
cluster = new JedisCluster(nodes, DEFAULT_TIMEOUT, MAX_REDIRECTIONS, poolConfig);
}
public RedisClusterManager() {
REDIS_HOST = SystemConf.get("REDIS_HOST");
REDIS_PORT = Integer.valueOf(SystemConf.get("REDIS_PORT"));
}
private static AtomicLong writeCount = new AtomicLong();
private static AtomicLong lastWriteCount = new AtomicLong();
private static AtomicLong readCount = new AtomicLong();
private static AtomicLong delCount = new AtomicLong();
private static AtomicLong checkCount = new AtomicLong();
private static AtomicLong errorCount = new AtomicLong();
private static AtomicLong lastReadCount = new AtomicLong();
private static long writeBeginTime = System.currentTimeMillis(), readLastCountTime, writeLastCountTime;
	private static final DecimalFormat speedFormat = new DecimalFormat("#,##0.00");// number format for speed output
private static boolean isCompleted = false;
	/**
	 * Delete praise (like) records listed in a file from the given sorted set.
	 *
	 * @param delKey   the praise zset key
	 * @param filePath file with one member per line; deleted members are written to filePath + ".deleted"
	 * @throws Exception
	 */
public void praiseDel(final String delKey, final String filePath) throws Exception {
BufferedReader br = new BufferedReader(new FileReader(filePath));
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath + ".deleted"));
String data = null;
long delCount = 0, readCount = 0;
while ((data = br.readLine()) != null) {
readCount++;
Double score = cluster.zscore(delKey, data.trim());
if (null != score) {
				long result = cluster.zrem(delKey, data.trim());
				if (1 == result) {
delCount++;
bw.write(data.trim() + "->" + score);
bw.write("\r\n");
}
}
}
br.close();
bw.close();
System.out.println("readCount:" + readCount + " delCount:" + delCount);
}
	/**
	 * Do not actually delete anything; only count how many of the listed members exist in the zset.
	 *
	 * @param delKey   the praise zset key
	 * @param filePath file with one member per line; matched members are written to filePath + ".deleted"
	 * @throws Exception
	 */
public void praiseCountDel(final String delKey, final String filePath) throws Exception {
BufferedReader br = new BufferedReader(new FileReader(filePath));
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath + ".deleted"));
String data = null;
long markedDelCount = 0, readCount = 0;
while ((data = br.readLine()) != null) {
readCount++;
String value = data.trim();
Double score = cluster.zscore(delKey, value);
if (null != score) {
markedDelCount++;
bw.write(value + "->" + score);
bw.write("\r\n");
}
}
br.close();
bw.close();
System.out.println("checkKey" + delKey + " readCount:" + readCount + " markedCount:" + markedDelCount);
}
	/**
	 * Scan the members of the given praise zset, print an hourly histogram of praise times,
	 * and flag accounts whose fans / attention / praise counts match the suspicious pattern.
	 */
public void praiseCount(final String importKey, final String filePath) {
		final List<String> dataQueue = Collections.synchronizedList(new LinkedList<String>());// queue of pending members
		final Thread[] writeThread = new Thread[cluster.getClusterNodes().size() * 3];// 3x the number of nodes
final Map<String, AtomicLong> statisticsMap = new TreeMap<String, AtomicLong>();
Thread readThread = new Thread(new Runnable() {
@Override
public void run() {
try {
String cursor = "0";
Date date = new Date();
java.text.DateFormat format1 = new java.text.SimpleDateFormat("yyyy-MM-dd-HH");
do {
ScanResult<Tuple> sscanResult = cluster.zscan(importKey, cursor, sp);
cursor = sscanResult.getStringCursor();
List<Tuple> result = sscanResult.getResult();
double time;
for (Tuple tuple : result) {
dataQueue.add(tuple.getElement());
time = tuple.getScore();
date.setTime((long) (time * 1000));
String key = format1.format(date);
AtomicLong count = statisticsMap.get(key);
if (null == count) {
count = new AtomicLong();
statisticsMap.put(key, count);
}
count.incrementAndGet();
}
long count = readCount.addAndGet(result.size());
if (count % 50000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("read count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
synchronized (dataQueue) {
								Collections.shuffle(dataQueue);// exports are grouped per node; shuffling spreads the load and improves throughput
							}
							while (dataQueue.size() > 100000) {// avoid exhausting memory
Thread.sleep(1000);
}
}
} while (!"0".equals(cursor));
synchronized (dataQueue) {
Collections.shuffle(dataQueue);
}
isCompleted = true;
					while (!dataQueue.isEmpty()) {// wait until all queued data has been written
Thread.sleep(500);
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = readCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("write total:" + totalCount + " speed:" + speedFormat.format(speed)
+ " useTime:" + (useTime / 1000.0) + "s");
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i].interrupt();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
readThread.start();
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i] = new Thread(new Runnable() {
@Override
public void run() {
while (!isCompleted || !dataQueue.isEmpty()) {
String uid = null;
if (dataQueue.isEmpty()) {
try {
Thread.sleep(100);
continue;
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
try {
synchronized (dataQueue) {
uid = dataQueue.remove(0);
}
} catch (IndexOutOfBoundsException e) {
continue;
}
}
long uf = cluster.zcard("u_f_" + uid);
long ua = cluster.zcard("u_a_" + uid);
long up = cluster.zcard("u_p_" + uid);
String info = "uid:" + uid + " uf:" + uf + " ua:" + ua + " up:" + up;
if (uf == 0 && ua <= 1 && up == 2) {
long count = writeCount.incrementAndGet();
System.out.println("marked->" + info);
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + readCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
}
}, "write thread [" + i + "]");
writeThread[i].setDaemon(true);
writeThread[i].start();
}
for (Thread thread : writeThread) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
System.out.println("statisticsMap->begin");
Iterator<Entry<String, AtomicLong>> it = statisticsMap.entrySet().iterator();
while (it.hasNext()) {
Entry<String, AtomicLong> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("statisticsMap->end");
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scanCount:" + readCount.get() + " markedCount:" + totalCount + " errorCount:"
+ errorCount.get() + " speed:" + speedFormat.format(speed) + " useTime:" + (useTime / 1000.0) + "s");
}
	/**
	 * Clean up garbage created by the restore: for every user this uid follows, remove the uid from that user's attention set (u_a_).
	 */
public void followAttentionDel(String importKey, final String filePath) throws IOException {
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath + ".deleted"));
final String[] keys = importKey.split(",");
long delCount = 0;
for (String key : keys) {
if (key.startsWith("u_a_")) {
String zcursor = "0";
do {
ScanResult<Tuple> sscanResult = cluster.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
String followUid = data.getElement();
String uid = key.substring("u_a_".length());
String keyDel = "u_a_" + followUid;
					long result = cluster.zrem(keyDel, uid);// remove our uid from the followed user's attention set
delCount++;
bw.write("result:" + result + " " + keyDel + "->" + data.getScore());
bw.write("\r\n");
}
} while (!"0".equals(zcursor));
}
}
bw.close();
System.out.println("followDel->delCount:" + delCount);
}
	/**
	 * Delete user fan relationships: for every fan in u_f_<uid>, remove this uid from that fan's attention set.
	 */
public void followDel(String importKey, final String filePath) throws IOException {
BufferedWriter bw = new BufferedWriter(new FileWriter(filePath + ".deleted"));
final String[] keys = importKey.split(",");
long delCount = 0;
for (String key : keys) {
if (key.startsWith("u_f_")) {
String zcursor = "0";
do {
ScanResult<Tuple> sscanResult = cluster.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
String followUid = data.getElement();
String uid = key.substring("u_f_".length());
cluster.zrem("u_a_" + followUid, uid);//从粉丝队列移除对自己的关注
delCount++;
bw.write(followUid + "->" + data.getScore());
bw.write("\r\n");
}
} while (!"0".equals(zcursor));
}
}
bw.close();
System.out.println("followDel->delCount:" + delCount);
}
	/**
	 * Restore user fan relationships from an export file, re-creating both directions of the follow relation.
	 */
public void followRestore(String importKey, final String filePath) {
final String[] importKeyPre = importKey.split(",");
		final List<JSONObject> dataQueue = Collections.synchronizedList(new LinkedList<JSONObject>());// queue of pending records
		final Thread[] writeThread = new Thread[cluster.getClusterNodes().size() * 3];// 3x the number of nodes
Thread readThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader br = new BufferedReader(new FileReader(filePath));
String data = null;
while ((data = br.readLine()) != null) {
JSONObject json = JSONObject.parseObject(data);
dataQueue.add(json);
long count = readCount.incrementAndGet();
if (count % 50000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("read count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
synchronized (dataQueue) {
								Collections.shuffle(dataQueue);// exports are grouped per node; shuffling spreads the load and improves throughput
							}
							while (dataQueue.size() > 100000) {// avoid exhausting memory
Thread.sleep(1000);
}
}
}
br.close();
synchronized (dataQueue) {
Collections.shuffle(dataQueue);
}
isCompleted = true;
					while (!dataQueue.isEmpty()) {// wait until all queued data has been written
Thread.sleep(500);
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = readCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("write total:" + totalCount + " speed:" + speedFormat.format(speed)
+ " useTime:" + (useTime / 1000.0) + "s");
} catch (Exception e) {
e.printStackTrace();
}
}
});
readThread.start();
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i] = new Thread(new Runnable() {
@Override
public void run() {
while (!isCompleted || !dataQueue.isEmpty()) {
JSONObject json = null;
if (dataQueue.isEmpty()) {
try {
Thread.sleep(100);
continue;
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
try {
synchronized (dataQueue) {
json = dataQueue.remove(0);
}
} catch (IndexOutOfBoundsException e) {
continue;
}
}
String key = json.getString("key");
String type = json.getString("type");
Object oject = json.get("value");
boolean isNeedImport = false;
for (String keyImport : importKeyPre) {
if ("*".equals(keyImport) || key.startsWith(keyImport)) {
isNeedImport = true;
break;
}
}
					// lists are merged with existing values
if (isNeedImport) {
if ("hash".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
Map<String, String> hash = new HashMap<String, String>();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
String dataKey = jsonData.getString("key");
String dataValue = jsonData.getString("value");
hash.put(dataKey, dataValue);
}
cluster.hmset(key, hash);
} else if ("string".equals(type)) {
String dataValue = (String) oject;
cluster.set(key, dataValue);
} else if ("list".equals(type)) {
JSONArray value = (JSONArray) oject;
List<String> inDb = cluster.lrange(key, 0, -1);
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
								if (!inDb.contains(dataValue)) {// merge: only push values not already in the list
cluster.rpush(key, dataValue);
} else {
// System.out.println("value:" + value);
}
}
} else if ("set".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
cluster.sadd(key, dataValue);
}
} else if ("zset".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
double score = jsonData.getLong("score");
String dataValue = jsonData.getString("value");
if (key.startsWith("u_f_")) {//粉丝队列
cluster.zadd(key, score, dataValue);//加粉丝
String uid = key.substring("u_f_".length());
cluster.zadd("u_a_" + dataValue, score, uid);//加关注
} else if (key.startsWith("u_a_")) {//关注队列
cluster.zadd(key, score, dataValue);//加关注
String uid = key.substring("u_a_".length());
cluster.zadd("u_f_" + dataValue, score, uid);//加粉丝
}
}
} else {
System.out.println("unknow keyType:" + type + "key:" + key);
}
long count = writeCount.incrementAndGet();
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + readCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
}
}, "write thread [" + i + "]");
writeThread[i].setDaemon(true);
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
writeThread[i].start();
}
}
	/**
	 * Import data from an export file, restricted to the given comma-separated key prefixes (use * for all).
	 */
public void importKey(String importKey, final String filePath) {
final String[] importKeyPre = importKey.split(",");
		final List<JSONObject> dataQueue = Collections.synchronizedList(new LinkedList<JSONObject>());// queue of pending records
		final Thread[] writeThread = new Thread[cluster.getClusterNodes().size() * 3];// 3x the number of nodes
Thread readThread = new Thread(new Runnable() {
@Override
public void run() {
try {
BufferedReader br = new BufferedReader(new FileReader(filePath));
String data = null;
while ((data = br.readLine()) != null) {
JSONObject json = JSONObject.parseObject(data);
dataQueue.add(json);
long count = readCount.incrementAndGet();
if (count % 50000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("read count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
synchronized (dataQueue) {
								Collections.shuffle(dataQueue);// exports are grouped per node; shuffling spreads the load and improves throughput
							}
							while (dataQueue.size() > 100000) {// avoid exhausting memory
Thread.sleep(1000);
}
}
}
br.close();
synchronized (dataQueue) {
Collections.shuffle(dataQueue);
}
isCompleted = true;
					while (!dataQueue.isEmpty()) {// wait until all queued data has been written
Thread.sleep(500);
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = readCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("write total:" + totalCount + " speed:" + speedFormat.format(speed)
+ " useTime:" + (useTime / 1000.0) + "s");
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i].interrupt();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
readThread.start();
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i] = new Thread(new Runnable() {
@Override
public void run() {
while (!isCompleted || !dataQueue.isEmpty()) {
JSONObject json = null;
if (dataQueue.isEmpty()) {
try {
Thread.sleep(100);
continue;
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
try {
synchronized (dataQueue) {
json = dataQueue.remove(0);
}
} catch (IndexOutOfBoundsException e) {
continue;
}
}
String key = json.getString("key");
String type = json.getString("type");
Object oject = json.get("value");
boolean isNeedImport = false;
for (String keyImport : importKeyPre) {
if ("*".equals(keyImport) || key.startsWith(keyImport)) {
isNeedImport = true;
break;
}
}
					// lists are merged with existing values
if (isNeedImport) {
if ("hash".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
Map<String, String> hash = new HashMap<String, String>();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
String dataKey = jsonData.getString("key");
String dataValue = jsonData.getString("value");
hash.put(dataKey, dataValue);
}
cluster.hmset(key, hash);
} else if ("string".equals(type)) {
String dataValue = (String) oject;
cluster.set(key, dataValue);
} else if ("list".equals(type)) {
JSONArray value = (JSONArray) oject;
List<String> inDb = cluster.lrange(key, 0, -1);
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
								if (!inDb.contains(dataValue)) {// merge: only push values not already in the list
cluster.rpush(key, dataValue);
} else {
// System.out.println("value:" + value);
}
}
} else if ("set".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
cluster.sadd(key, dataValue);
}
} else if ("zset".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
double score = jsonData.getLong("score");
String dataValue = jsonData.getString("value");
cluster.zadd(key, score, dataValue);
}
} else {
System.out.println("unknow keyType:" + type + "key:" + key);
}
long count = writeCount.incrementAndGet();
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + readCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
}
}, "write thread [" + i + "]");
writeThread[i].setDaemon(true);
writeThread[i].start();
}
}
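	/** Import exported hash records into MongoDB (one collection per key prefix); other data types are currently skipped, their Redis writes are left commented out. */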
private void importMongodb(String KeyPre, String filePath) {
MongoClient mongo = new MongoClient("mycentos-01", 27017);
MongoDatabase db0 = mongo.getDatabase("db0");
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(filePath));
String data = null;
String[] importKeyPre = KeyPre.split(",");
while ((data = br.readLine()) != null) {
JSONObject json = JSONObject.parseObject(data);
String key = json.getString("key");
String type = json.getString("type");
Object oject = json.get("value");
boolean isNeedImport = false;
for (String keyImport : importKeyPre) {
if ("*".equals(keyImport) || key.startsWith(keyImport)) {
isNeedImport = true;
break;
}
}
				// lists are merged with existing values
if (isNeedImport) {
int index = key.lastIndexOf("_");
String collectionName = null;
if (index > 0) {
collectionName = key.substring(0, index);
} else {
collectionName = "default";
}
if ("hash".equals(type)) {
try {
//db0.createCollection(collectionName);
} catch (Exception e) {
e.printStackTrace();
}
MongoCollection<Document> coll = db0.getCollection(collectionName);
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
Map<String, String> hash = new HashMap<String, String>();
Document info = new Document();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
String dataKey = jsonData.getString("key");
String dataValue = jsonData.getString("value");
hash.put(dataKey, dataValue);
info.put(dataKey, dataValue);
}
coll.insertOne(info);
WriteConcern concern = coll.getWriteConcern();
concern.isAcknowledged();
//cluster.hmset(key, hash);
} else if ("string".equals(type)) {
String dataValue = (String) oject;
//cluster.set(key, dataValue);
} else if ("list".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
//cluster.rpush(key, dataValue);
}
} else if ("set".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
//cluster.sadd(key, dataValue);
}
} else if ("zset".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
double score = jsonData.getLong("score");
String dataValue = jsonData.getString("value");
//cluster.zadd(key, score, dataValue);
}
} else {
System.out.println("unknow keyType:" + type + "key:" + key);
}
long count = writeCount.incrementAndGet();
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + readCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
br.close();
} catch (IOException e) {
e.printStackTrace();
}
}
mongo.close();
}
	/**
	 * Export data matching the given comma-separated key prefixes to a file, scanning every master node in parallel.
	 */
public void exportKeyPre(String keyPre, final String filePath) {
final String[] exportKeyPre = keyPre.split(",");
createExportFile(filePath + ".0");
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
			} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {// some failed nodes cannot be connected
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只导出master
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
boolean isExport = false;
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
isExport = true;
break;
}
}
long count = readCount.incrementAndGet();
if (count % 1000000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("scan count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
if (!isExport) {
continue;
}
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = nodeCli.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = nodeCli.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", nodeCli.get(key));
} else if ("list".equals(keyType)) {
							int readSize, batchSize = 100;// renamed to avoid shadowing the static readCount counter
							long start = 0, end = start + batchSize;
							JSONArray value = new JSONArray();
							do {
								List<String> data = nodeCli.lrange(key, start, end);
								readSize = data.size();
								for (int i = 0; i < readSize; i++) {
									value.add(data.get(i));
								}
								start = end + 1;
								end += readSize;
							} while (readSize == batchSize + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = nodeCli.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
json.put("time", System.currentTimeMillis());
writeFile(json.toJSONString(), "export", filePath);
}
} while (!"0".equals(cursor));
}
}, entry.getKey() + "export thread");
exportTheadList.add(exportThread);
exportThread.start();
}
for (Thread thread : exportTheadList) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan count:" + readCount.get() + " export total:" + totalCount + " speed:"
+ speedFormat.format(speed) + " useTime:" + (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
private static BufferedWriter bw = null;
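	/** Create (or roll over to) an export file, writing a UTF-8 BOM for empty files and reopening the shared writer. */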
public static synchronized void createExportFile(String filePath) {
String pathDir = filePath.substring(0, filePath.lastIndexOf("/"));
File file = new File(pathDir);
if (!file.isDirectory()) {
file.mkdirs();
}
File f = new File(filePath);
FileOutputStream fos = null;
try {
fos = new FileOutputStream(f);
// write UTF8 BOM mark if file is empty
if (f.length() < 1) {
final byte[] bom = new byte[] { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF };
fos.write(bom);
}
} catch (IOException ex) {
} finally {
try {
fos.close();
} catch (Exception ex) {
}
}
try {
bw = new BufferedWriter(new FileWriter(filePath, true));
} catch (IOException e) {
e.printStackTrace();
}
}
	private static final long FILE_PARTITION_LINE_COUNT = 1000000;// 1,000,000 lines per output file
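	/** Append one JSON line to the current export file, logging throughput and rolling to a new file every FILE_PARTITION_LINE_COUNT lines. */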
public static synchronized void writeFile(String data, String optType, String filePath) {
try {
if (null == bw) {
createExportFile(filePath + ".0");
}
bw.write(data);
bw.write('\r');
bw.write('\n');
long count = writeCount.incrementAndGet();
if (count % 100000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println(optType + " count:" + count + " speed:" + speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
			if (count % FILE_PARTITION_LINE_COUNT == 0) {// roll over to a new output file
createExportFile(filePath + "." + (count / FILE_PARTITION_LINE_COUNT));
}
} catch (IOException e) {
e.printStackTrace();
}
}
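	/** Export the keys listed (one per line) in keyFilePath to the given output file. */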
public void exportKeysFile(String keyFilePath, String filePath) {
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(keyFilePath));
String data = null;
while ((data = br.readLine()) != null) {
exportKeys(data, filePath);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (null != br) {
br.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
	/**
	 * Scan the given index zset and bucket its members into separate output files by the size of their fans set (u_f_<uid>).
	 */
public void importKey2(final String indexKey, String preKey, final String filePath) {
final String[] importKeyPre = indexKey.split(",");
		final List<JSONObject> dataQueue = Collections.synchronizedList(new LinkedList<JSONObject>());// queue of pending records
		final Thread[] writeThread = new Thread[cluster.getClusterNodes().size() * 3];// 3x the number of nodes
Thread readThread = new Thread(new Runnable() {
@Override
public void run() {
String hcursor = "0";
JSONObject json = new JSONObject();
do {
ScanResult<Tuple> hscanResult = cluster.zscan(indexKey, hcursor, sp);
hcursor = hscanResult.getStringCursor();
String fileExt;
for (Tuple entry : hscanResult.getResult()) {
String uidKey = entry.getElement();
long zcard = cluster.zcard("u_f_" + uidKey);
json.put("uid", uidKey);
json.put("zcard", zcard);
if (zcard > 1000) {
fileExt = "1000+";
} else if (zcard > 500 && zcard <= 1000) {
fileExt = "500-1000";
} else if (zcard > 300 && zcard <= 500) {
fileExt = "300-500";
} else if (zcard > 200 && zcard <= 300) {
fileExt = "200-300";
} else if (zcard > 100 && zcard <= 200) {
fileExt = "100-200";
} else if (zcard >= 1 && zcard <= 100) {
fileExt = "1-100";
} else {
fileExt = "0";
}
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath + fileExt, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
long count = readCount.incrementAndGet();
if (count % 10000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println(" count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
}
} while (!"0".equals(hcursor));
try {
BufferedReader br = new BufferedReader(new FileReader(filePath));
String data = null;
while ((data = br.readLine()) != null) {
dataQueue.add(json);
long count = readCount.incrementAndGet();
if (count % 50000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("read count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
synchronized (dataQueue) {
								Collections.shuffle(dataQueue);// exports are grouped per node; shuffling spreads the load and improves throughput
							}
							while (dataQueue.size() > 100000) {// avoid exhausting memory
Thread.sleep(1000);
}
}
}
br.close();
synchronized (dataQueue) {
Collections.shuffle(dataQueue);
}
isCompleted = true;
					while (!dataQueue.isEmpty()) {// wait until all queued data has been written
Thread.sleep(500);
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = readCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("write total:" + totalCount + " speed:" + speedFormat.format(speed)
+ " useTime:" + (useTime / 1000.0) + "s");
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i].interrupt();
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
readThread.start();
for (int i = 0; i <= writeThread.length - 1; i++) {
writeThread[i] = new Thread(new Runnable() {
@Override
public void run() {
while (!isCompleted || !dataQueue.isEmpty()) {
JSONObject json = null;
if (dataQueue.isEmpty()) {
try {
Thread.sleep(100);
continue;
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
try {
synchronized (dataQueue) {
json = dataQueue.remove(0);
}
} catch (IndexOutOfBoundsException e) {
continue;
}
}
String key = json.getString("key");
String type = json.getString("type");
Object oject = json.get("value");
boolean isNeedImport = false;
for (String keyImport : importKeyPre) {
if ("*".equals(keyImport) || key.startsWith(keyImport)) {
isNeedImport = true;
break;
}
}
//list values are merged rather than overwritten on import
if (isNeedImport) {
if ("hash".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
Map<String, String> hash = new HashMap<String, String>();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
String dataKey = jsonData.getString("key");
String dataValue = jsonData.getString("value");
hash.put(dataKey, dataValue);
}
cluster.hmset(key, hash);
} else if ("string".equals(type)) {
String dataValue = (String) oject;
cluster.set(key, dataValue);
} else if ("list".equals(type)) {
JSONArray value = (JSONArray) oject;
List<String> inDb = cluster.lrange(key, 0, -1);
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
if (!inDb.contains(dataValue)) {//merge lists: only push values that are not already present
cluster.rpush(key, dataValue);
} else {
// System.out.println("value:" + value);
}
}
} else if ("set".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
String dataValue = (String) it.next();
cluster.sadd(key, dataValue);
}
} else if ("zset".equals(type)) {
JSONArray value = (JSONArray) oject;
Iterator<Object> it = value.iterator();
while (it.hasNext()) {
JSONObject jsonData = (JSONObject) it.next();
double score = jsonData.getLong("score");
String dataValue = jsonData.getString("value");
cluster.zadd(key, score, dataValue);
}
} else {
System.out.println("unknow keyType:" + type + "key:" + key);
}
long count = writeCount.incrementAndGet();
if (count % 10000 == 0) {
if (writeLastCountTime > 0) {
long useTime = System.currentTimeMillis() - writeLastCountTime;
float speed = (float) ((count - lastWriteCount.get()) / (useTime / 1000.0));
System.out.println("write count:" + count + "/" + readCount + " speed:"
+ speedFormat.format(speed));
}
writeLastCountTime = System.currentTimeMillis();
lastWriteCount.set(count);
}
}
}
}
}, "write thread [" + i + "]");
writeThread[i].setDaemon(true);
writeThread[i].start();
}
}
/**
* Repairs data based on follow relations: if a user is followed but missing from the follower list, the follower entry is added back;
* also removes entries where a user follows themselves, is their own fan, or is their own friend.
*/
public void uaCheck(final String filePath) {
final String u_a_ = "u_a_";
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//some nodes may be down and unreachable
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只导出master
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
long count = readCount.incrementAndGet();
if (count % 1000000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("scanCount:" + count + " speed:" + speedFormat.format(speed)
+ " checkCount:" + checkCount.get() + " errorCount:" + errorCount.get());
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
String uid;
if (key.startsWith(u_a_)) {
uid = key.substring(4);
try {
Integer.valueOf(uid);
} catch (Exception e) {
continue;
}
} else {
continue;
}
String errorInfo;
Double score;
Date time = new Date();
if (null != (score = cluster.zscore("u_a_" + uid, uid))) {//自己关注自己的需要去掉
time.setTime((long) (score * 1000));
errorInfo = uid + "-u_a_>" + uid + " score:" + time;
cluster.zrem("u_a_" + uid, uid);
writeFile(errorInfo, "export", filePath);
}
if (null != (score = cluster.zscore("u_f_" + uid, uid))) {//自己是自己的粉丝需要去掉
time.setTime((long) (score * 1000));
errorInfo = uid + "-u_f_>" + uid + " score:" + time;
cluster.zrem("u_f_" + uid, uid);
writeFile(errorInfo, "export", filePath);
}
if (null != (score = cluster.zscore("u_friend_" + uid, uid))) {//去掉好友关系
time.setTime((long) (score * 1000));
errorInfo = uid + "-u_friend_>" + uid + " score:" + time;
cluster.zrem("u_friend_" + uid, uid);
writeFile(errorInfo, "export", filePath);
}
String zcursor = "0";
String u_a_id;
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
u_a_id = data.getElement();
score = data.getScore();
checkCount.incrementAndGet();
if ("99521678".endsWith(u_a_id) || "88011458".equals(u_a_id)) {
continue;//种草君,假leo不管
}
if (null == cluster.zscore("u_f_" + u_a_id, uid)) {//关注了粉丝列表没有
cluster.zadd("u_f_" + u_a_id, score, uid);//向粉丝列表添加来修复数据
errorCount.incrementAndGet();
errorInfo = uid + "->" + u_a_id;
System.out.println(errorInfo);
writeFile(errorInfo, "export", filePath);
}
}
} while (!"0".equals(zcursor));
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "export thread");
exportTheadList.add(exportThread);
exportThread.start();
}
for (Thread thread : exportTheadList) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan count:" + readCount.get() + " export total:" + totalCount + " speed:"
+ speedFormat.format(speed) + " checkCount:" + checkCount.get() + " errorCount:" + errorCount.get()
+ " useTime:" + (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Repairs data based on the fan list: if a fan entry has no matching follow entry, it is removed from the fan list.
*/
public void ufCheck(final String filePath) {
final String u_f_ = "u_f_";
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//some nodes may be down and unreachable
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只导出master
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
long count = readCount.incrementAndGet();
if (count % 1000000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("scanCount:" + count + " speed:" + speedFormat.format(speed)
+ " checkCount:" + checkCount.get() + " errorCount:" + errorCount.get());
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
String uid;
if (key.startsWith(u_f_)) {
uid = key.substring(4);
if ("99521678".equals(uid)) {//种草君的不管
continue;
}
try {
Integer.valueOf(uid);
} catch (Exception e) {
continue;
}
} else {
continue;
}
String zcursor = "0";
String u_f_id;
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
u_f_id = data.getElement();
checkCount.incrementAndGet();
if (null == cluster.zscore("u_a_" + u_f_id, uid)) {//粉丝表里有,关注列表里没有,需要删除
cluster.zrem(key, u_f_id);//删除粉丝列表的数据来修复
errorCount.incrementAndGet();
String errorInfo = uid + "->" + u_f_id;
System.out.println(errorInfo);
writeFile(errorInfo, "export", filePath);
}
}
} while (!"0".equals(zcursor));
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "export thread");
exportTheadList.add(exportThread);
exportThread.start();
}
for (Thread thread : exportTheadList) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan count:" + readCount.get() + " export total:" + totalCount + " speed:"
+ speedFormat.format(speed) + " checkCount:" + checkCount.get() + " errorCount:" + errorCount.get()
+ " useTime:" + (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Shutdown hook thread: closes the shared writer when the JVM exits.
*/
static class CleanWorkThread extends Thread {
@Override
public void run() {
try {
if (null != bw) {
bw.close();
System.out.println("bw closed");
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
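// Note: this hook is registered in main() via Runtime.getRuntime().addShutdownHook(new CleanWorkThread()),
// so a partially written export file is flushed and closed even when the process is interrupted.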
/**
* Counts fans for every u_f_ key and exports uid, fan count, and nickname as CSV.
*/
public void fansCount(final String filePath) {
final String[] exportKeyPre = "u_f_".split(",");
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//some nodes may be down and unreachable
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只导出master
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
boolean isExport = false;
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
isExport = true;
break;
}
}
long count = readCount.incrementAndGet();
if (count % 1000000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println("scan count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
if (!isExport) {
continue;
}
String keyType = nodeCli.type(key);
String uidKey = key.substring(key.lastIndexOf('_') + 1);
StringBuffer sb = new StringBuffer();
if ("zset".equals(keyType)) {
long zcard = cluster.zcard("u_f_" + uidKey);
if (0 == zcard) {//only users with at least one fan are counted
continue;
}
sb.append("\"").append(uidKey).append("\"").append(',').append(zcard).append(',');
List<String> nickname = cluster.hmget("rpcUserInfo" + uidKey, "nickname");
if (null != nickname && nickname.size() > 0 && null != nickname.get(0)) {
sb.append("\"").append(nickname.get(0).replace(",", "")).append("\"");
} else {
sb.append("\"\"");
}
}
writeFile(sb.toString(), "export", filePath);
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "export thread");
exportTheadList.add(exportThread);
exportThread.start();
}
for (Thread thread : exportTheadList) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = writeCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan count:" + readCount.get() + " export total:" + totalCount + " speed:"
+ speedFormat.format(speed) + " useTime:" + (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
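// Each exported line is CSV of the form "uid",fanCount,"nickname"; typical invocation (see main()): fansCount D:/export.dat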
/**
* Buckets users by fan count: scans the given zset of uids and writes each uid's u_f_ cardinality into size-ranged files.
*/
public void keySizeCount(String key, String filePath) {
filePath += key;
String hcursor = "0";
JSONObject json = new JSONObject();
do {
ScanResult<Tuple> hscanResult = cluster.zscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
String fileExt;
for (Tuple entry : hscanResult.getResult()) {
String uidKey = entry.getElement();
long zcard = cluster.zcard("u_f_" + uidKey);
json.put("uid", uidKey);
json.put("zcard", zcard);
if (zcard > 100000) {
List<String> nickname = cluster.hmget("rpcUserInfo" + uidKey, "nickname");
if (null != nickname && nickname.size() > 0) {
json.put("nickname", nickname.get(0));
}
fileExt = "10W+";
} else if (zcard > 10000 && zcard <= 100000) {
fileExt = "1W-10W";
} else if (zcard > 1000 && zcard <= 10000) {
fileExt = "1k-1W";
} else if (zcard > 500 && zcard <= 1000) {
fileExt = "500-1000";
} else if (zcard > 300 && zcard <= 500) {
fileExt = "300-500";
} else if (zcard > 200 && zcard <= 300) {
fileExt = "200-300";
} else if (zcard > 100 && zcard <= 200) {
fileExt = "100-200";
} else if (zcard >= 1 && zcard <= 100) {
fileExt = "1-100";
} else {
fileExt = "0";
}
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath + fileExt, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
long count = readCount.incrementAndGet();
if (count % 10000 == 0) {
if (readLastCountTime > 0) {
long useTime = System.currentTimeMillis() - readLastCountTime;
float speed = (float) ((count - lastReadCount.get()) / (useTime / 1000.0));
System.out.println(" count:" + count + " speed:" + speedFormat.format(speed));
}
readLastCountTime = System.currentTimeMillis();
lastReadCount.set(count);
}
}
} while (!"0".equals(hcursor));
}
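// Typical invocation (see main()): key-size-count u_id_set D:/ ; output files are named <filePath><key><range>, e.g. D:/u_id_set1-100.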
/**
* Exports the given keys (comma-separated) from the cluster to a file, one JSON record per line.
*/
public void exportHostKeys(String keys, String filePath) {
for (String key : keys.split(",")) {
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = cluster.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = cluster.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", cluster.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 1;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = cluster.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = cluster.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = cluster.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
synchronized (this) {//the multi-threaded delete path also calls this method, so file writes must be synchronized
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
/**
* Exports the given keys (comma-separated) from the cluster to a file, one JSON record per line.
*/
public void exportKeys(String keys, String filePath) {
for (String key : keys.split(",")) {
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = cluster.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = cluster.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", cluster.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 1;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = cluster.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = cluster.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = cluster.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
synchronized (this) {//the multi-threaded delete path also calls this method, so file writes must be synchronized
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
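// Each exported line is a JSON record such as {"key":"u_f_123","type":"zset","value":[{"score":1.45E9,"value":"456"}]};
// the exact value layout depends on the key type, mirroring the branches above (key and member values here are illustrative).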
/**
* Exports the given keys (comma-separated) from a single host to a file, one JSON record per line.
*/
public void exportHostKeys(String host, String port, String keys, String filePath) {
String[] keysInfo = keys.split(",");
Jedis nodeCli = new Jedis(host, Integer.valueOf(port));
long beginTime = System.currentTimeMillis();
for (String key : keysInfo) {
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = nodeCli.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = nodeCli.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", nodeCli.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 1;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = nodeCli.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
//System.out.println("data:" + data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = nodeCli.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath, true));//append, so earlier keys exported in the same run are not overwritten
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
nodeCli.close();
String useTime = " useTime->" + ((System.currentTimeMillis() - beginTime) / 1000) + "s";
System.out.println(useTime);
}
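// Typical invocation (see main()): exportHostKeys ip port key1,key2 D:/export.dat ; this variant connects directly to the
// given node instead of routing through the cluster client.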
/**
* Scans a single host and exports all keys matching the given prefixes, one JSON record per line.
*/
public void exportKeyOneHost(String keyPre, String filePath) {
String[] exportKeyPre = keyPre.split(",");
Jedis nodeCli = new Jedis(REDIS_HOST, REDIS_PORT);
long scanTotalcount = 0, exportTotalCount = 0;
long beginTime = System.currentTimeMillis();
String info = nodeCli.info("Keyspace");
long dbKeySize = 0;
if (info.indexOf("db0:keys=") > 0) {
String value = info.substring(info.indexOf("db0:keys=") + "db0:keys=".length()).split(",")[0];
dbKeySize = Long.valueOf(value);
}
String cursor = "0";
long thisScanSize = 0, thisExportSize = 0;
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
thisScanSize++;
scanTotalcount++;
if (thisScanSize % 1000 == 0) {
System.out.println("thisScanSize:" + thisScanSize + "/" + dbKeySize + " thisExportSize:"
+ thisExportSize + " totalUseTime:" + (System.currentTimeMillis() - beginTime) / 1000
+ "s)");
}
boolean isExport = false;
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
isExport = true;
break;
}
}
if (!isExport) {
continue;
}
JSONObject json = new JSONObject();
json.put("key", key);
String keyType = nodeCli.type(key);
json.put("type", keyType);
if ("hash".equals(keyType)) {
String hcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Entry<String, String>> hscanResult = nodeCli.hscan(key, hcursor, sp);
hcursor = hscanResult.getStringCursor();
for (Entry<String, String> entry : hscanResult.getResult()) {
JSONObject valueData = new JSONObject();
valueData.put("key", entry.getKey());
valueData.put("value", entry.getValue());
value.add(valueData);
}
} while (!"0".equals(hcursor));
json.put("value", value);
} else if ("string".equals(keyType)) {
json.put("value", nodeCli.get(key));
} else if ("list".equals(keyType)) {
int readSize, readCount = 1;
long start = 0, end = start + readCount;
JSONArray value = new JSONArray();
do {
List<String> data = nodeCli.lrange(key, start, end);
readSize = data.size();
for (int i = 0; i < readSize; i++) {
value.add(data.get(i));
//System.out.println("data:" + data.get(i));
}
start = end + 1;
end += readSize;
} while (readSize == readCount + 1);
json.put("value", value);
} else if ("set".equals(keyType)) {
String scursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<String> sscanResult = nodeCli.sscan(key, scursor, sp);
scursor = sscanResult.getStringCursor();
for (String data : sscanResult.getResult()) {
value.add(data);
}
} while (!"0".equals(scursor));
json.put("value", value);
} else if ("zset".equals(keyType)) {
String zcursor = "0";
JSONArray value = new JSONArray();
do {
ScanResult<Tuple> sscanResult = nodeCli.zscan(key, zcursor, sp);
zcursor = sscanResult.getStringCursor();
for (Tuple data : sscanResult.getResult()) {
JSONObject dataJson = new JSONObject();
dataJson.put("score", data.getScore());
dataJson.put("value", data.getElement());
value.add(dataJson);
}
} while (!"0".equals(zcursor));
json.put("value", value);
} else {
System.out.println("unknow keyType:" + keyType + "key:" + key);
}
// System.out.println("data json:" + json);
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath, true));
bw.write(json.toJSONString());
bw.write('\r');
bw.write('\n');
thisExportSize++;
exportTotalCount++;
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
} while ((!"0".equals(cursor)));
nodeCli.close();
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
String useTime = " useTime->" + ((System.currentTimeMillis() - beginTime) / 1000) + "s";
System.out.println(dfs.format(new Date()) + "exportKey:" + keyPre + "]" + useTime);
System.out.println("scanTotalcount->" + scanTotalcount + " exportTotalCount->" + exportTotalCount);
}
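// Typical invocation (see main()): exporth keyPattern D:/export.dat ; REDIS_HOST/REDIS_PORT identify the single node to scan.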
/**
* Queries keys matching the given pattern on every master node and prints key -> value (values read with GET).
*/
public void queryKeyLike(String pattern) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
int count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master")) {
Jedis nodeCli = new Jedis(host, port);//connect to this master node
Set<String> keys = nodeCli.keys(pattern);
Iterator<String> t1 = keys.iterator();
while (t1.hasNext()) {
String key = t1.next();
System.out.println(key + "->" + nodeCli.get(key));
count++;
}
nodeCli.close();
}
}
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
System.out.println(dfs.format(new Date()) + pattern + "] query count->" + count + " useTime->"
+ ((System.currentTimeMillis() - beginTime)) + "ms ");
}
/**
* Counts keys matching the given pattern across all master nodes.
*/
public void countKeyLike(String pattern) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
int count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master")) {
Jedis nodeCli = new Jedis(host, port);//connect to this master node
Set<String> keys = nodeCli.keys(pattern);
count += keys.size();
nodeCli.close();
}
}
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
System.out.println(dfs.format(new Date()) + pattern + "] count->" + count + " useTime->"
+ ((System.currentTimeMillis() - beginTime)) + "ms ");
}
/**
* Monitors cluster status: aggregates INFO metrics across master nodes and appends one CSV line to monitor.csv.
*/
public void monitor(String[] args) {
double connected_clients = 0, total_commands_processed = 0, instantaneous_ops_per_sec = 0, total_net_input_bytes = 0, total_net_output_bytes = 0, instantaneous_input_kbps = 0, instantaneous_output_kbps = 0, used_memory = 0;
long keyTotalCount = 0;
DecimalFormat formatDouble = new DecimalFormat("##0.00");//number formatting
DecimalFormat formatLong = new DecimalFormat("##0");//number formatting
Map<String, String> opsMap = new TreeMap<String, String>();
Map<String, String> ramMap = new TreeMap<String, String>();
Map<String, String> inputMap = new TreeMap<String, String>();
Map<String, String> outputMap = new TreeMap<String, String>();
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
JedisPool pool = entry.getValue();
String info = null;
Jedis jedis;
try {
jedis = pool.getResource();
info = jedis.info();
pool.returnResourceObject(jedis);
} catch (JedisConnectionException e) {
String msg = e.getMessage();
if (msg.contains("Connection refused")) {
System.out.println(entry.getKey() + " Connection refused");
continue;
}
} catch (Exception e) {
e.printStackTrace();
}
if (info.contains("role:slave")) {//只统计master
continue;
}
connected_clients += getValue(info, "connected_clients");
total_commands_processed += getValue(info, "total_commands_processed");
instantaneous_ops_per_sec += getValue(info, "instantaneous_ops_per_sec");
opsMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_ops_per_sec")));
total_net_input_bytes += getValue(info, "total_net_input_bytes");
total_net_output_bytes += getValue(info, "total_net_output_bytes");
instantaneous_input_kbps += getValue(info, "instantaneous_input_kbps");
inputMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_input_kbps") / 1024) + "KB");
instantaneous_output_kbps += getValue(info, "instantaneous_output_kbps");
outputMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_output_kbps") / 1024)
+ "KB");
used_memory += getValue(info, "used_memory");
ramMap.put(entry.getKey(), formatDouble.format(getValue(info, "used_memory") / 1024 / 1024) + "MB");
if (info.indexOf("db0:keys=") > 0) {
String value = info.substring(info.indexOf("db0:keys=") + "db0:keys=".length()).split(",")[0];
keyTotalCount += Long.valueOf(value);
}
}
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
StringBuffer sb = new StringBuffer();
sb.append(sdf.format(new Date()));
sb.append(",");
sb.append(formatLong.format(connected_clients));
sb.append(",");
sb.append(formatLong.format(total_commands_processed));
sb.append(",");
sb.append(formatLong.format(instantaneous_ops_per_sec));
sb.append(",");
sb.append(formatDouble.format(total_net_input_bytes / 1024 / 1024));
sb.append(",");
sb.append(formatDouble.format(total_net_output_bytes / 1024 / 1024));
sb.append(",");
sb.append(formatDouble.format(instantaneous_input_kbps));
sb.append(",");
sb.append(formatDouble.format(instantaneous_output_kbps));
sb.append(",");
sb.append(formatDouble.format(used_memory / 1024 / 1024));
sb.append(",");
sb.append(keyTotalCount);
System.out.println(sb.toString());
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(SystemConf.confFileDir + "/monitor.csv", true));
bw.write(sb.toString());
bw.write('\r');
bw.write('\n');
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
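// monitor.csv column order: timestamp, connected_clients, total_commands_processed, instantaneous_ops_per_sec,
// total_net_input_bytes (MB), total_net_output_bytes (MB), instantaneous_input_kbps, instantaneous_output_kbps,
// used_memory (MB), total key count.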
/**
* Prints aggregated cluster INFO statistics; optional arguments (ops, input, output, ram) print per-node breakdowns.
*/
public void info(String[] args) {
double connected_clients = 0, total_commands_processed = 0, instantaneous_ops_per_sec = 0, total_net_input_bytes = 0, total_net_output_bytes = 0, instantaneous_input_kbps = 0, instantaneous_output_kbps = 0, used_memory = 0;
long keyTotalCount = 0;
DecimalFormat formatDouble = new DecimalFormat("#,##0.00");//number formatting
DecimalFormat formatLong = new DecimalFormat("#,##0");//number formatting
Map<String, String> opsMap = new TreeMap<String, String>();
Map<String, String> ramMap = new TreeMap<String, String>();
Map<String, String> inputMap = new TreeMap<String, String>();
Map<String, String> outputMap = new TreeMap<String, String>();
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
String info = null;
try {
info = entry.getValue().getResource().info();
} catch (JedisConnectionException e) {
String msg = e.getMessage();
if (msg.contains("Connection refused")) {
System.out.println(entry.getKey() + " Connection refused");
continue;
}
}
if (null == info || info.contains("role:slave")) {//only aggregate master nodes
continue;
}
connected_clients += getValue(info, "connected_clients");
total_commands_processed += getValue(info, "total_commands_processed");
instantaneous_ops_per_sec += getValue(info, "instantaneous_ops_per_sec");
opsMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_ops_per_sec")));
total_net_input_bytes += getValue(info, "total_net_input_bytes");
total_net_output_bytes += getValue(info, "total_net_output_bytes");
instantaneous_input_kbps += getValue(info, "instantaneous_input_kbps");
inputMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_input_kbps") / 1024) + "KB");
instantaneous_output_kbps += getValue(info, "instantaneous_output_kbps");
outputMap.put(entry.getKey(), formatDouble.format(getValue(info, "instantaneous_output_kbps") / 1024)
+ "KB");
used_memory += getValue(info, "used_memory");
ramMap.put(entry.getKey(), formatDouble.format(getValue(info, "used_memory") / 1024 / 1024) + "MB");
if (info.indexOf("db0:keys=") > 0) {
String value = info.substring(info.indexOf("db0:keys=") + "db0:keys=".length()).split(",")[0];
keyTotalCount += Long.valueOf(value);
}
}
if (args.length >= 2) {
Iterator<Entry<String, String>> it;
for (int i = 0; i < args.length; i++) {
if ("ops".equals(args[i])) {
it = opsMap.entrySet().iterator();
System.out.println("instantaneous_ops_per_sec");
while (it.hasNext()) {
Entry<String, String> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("instantaneous_ops_per_sec:" + formatLong.format(instantaneous_ops_per_sec));
} else if ("input".equals(args[i])) {
it = inputMap.entrySet().iterator();
System.out.println("instantaneous_input_kbps");
while (it.hasNext()) {
Entry<String, String> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("total_net_input_bytes:"
+ formatDouble.format(total_net_input_bytes / 1024 / 1024) + "MB");
} else if ("output".equals(args[i])) {
it = outputMap.entrySet().iterator();
System.out.println("instantaneous_output_kbps");
while (it.hasNext()) {
Entry<String, String> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("total_net_output_bytes:"
+ formatDouble.format(total_net_output_bytes / 1024 / 1024) + "MB");
} else if ("ram".equals(args[i])) {
it = ramMap.entrySet().iterator();
System.out.println("used_memory");
while (it.hasNext()) {
Entry<String, String> entry = it.next();
System.out.println(entry.getKey() + "->" + entry.getValue());
}
System.out.println("used_memory:" + formatDouble.format(used_memory / 1024 / 1024) + "MB");
}
}
} else {
System.out.println("connected_clients:" + formatLong.format(connected_clients));
System.out.println("total_commands_processed:" + formatLong.format(total_commands_processed));
System.out.println("instantaneous_ops_per_sec:" + formatLong.format(instantaneous_ops_per_sec));
System.out.println("total_net_input_bytes:" + formatDouble.format(total_net_input_bytes / 1024 / 1024)
+ "MB");
System.out.println("total_net_output_bytes:" + formatDouble.format(total_net_output_bytes / 1024 / 1024)
+ "MB");
System.out.println("instantaneous_input_kbps:" + formatDouble.format(instantaneous_input_kbps));
System.out.println("instantaneous_output_kbps:" + formatDouble.format(instantaneous_output_kbps));
System.out.println("used_memory:" + formatDouble.format(used_memory / 1024 / 1024) + "MB");
System.out.println("keyTotalCount:" + keyTotalCount);
}
}
private double getValue(String info, String key) {
String value;
value = info.substring(info.indexOf(key) + key.length() + 1).split("\r\n")[0];
return Double.valueOf(value);
}
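// getValue pulls a numeric field out of the raw INFO text, e.g. for "connected_clients:42\r\n" and key "connected_clients" it returns 42.0.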
/**
* Prints all keys matching the given pattern across master nodes.
*/
public void keys(String pattern) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
int count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master")) {
Jedis nodeCli = new Jedis(host, port);//connect to this master node
Set<String> keys = nodeCli.keys(pattern);// TODO change to use scan
for (String key : keys) {
System.out.println(key);
count++;
}
nodeCli.close();
}
}
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
System.out.println(dfs.format(new Date()) + pattern + "*] count->" + count + " useTime->"
+ ((System.currentTimeMillis() - beginTime)) + "ms ");
}
/**
* Counts the total number of keys across all master nodes (read from INFO Keyspace).
*/
public long keySize() {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
long count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master") && !type.contains("fail")) {
try {
Jedis nodeCli = new Jedis(host, port);//connect to this master node
String info = nodeCli.info("Keyspace");
if (info.indexOf("db0:keys=") > 0) {
String value = info.substring(info.indexOf("db0:keys=") + "db0:keys=".length()).split(",")[0];
count += Long.valueOf(value);
}
nodeCli.close();
} catch (Exception e) {
}
}
}
System.out.println("clusterKeySize:" + count + " useTime->" + ((System.currentTimeMillis() - beginTime))
+ "ms ");
return count;
}
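// keySize parses the "db0:keys=<n>,expires=...,avg_ttl=..." line from INFO Keyspace on each master and sums the <n> values.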
/**
* Deletes all keys matching the given prefixes and logs every deleted key to a file.
* @param keyPre comma-separated key prefixes ("*" matches everything)
*/
public void dels(String keyPre, final String filePath) {
final String[] exportKeyPre = keyPre.split(",");
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
try {
entry.getValue().getResource();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {//some nodes may be down and unreachable
System.out.println(entry.getKey() + " conn error:" + e.getMessage());
continue;
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (info.contains("role:slave")) {//只能从master删除
continue;
}
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
nodeCli.del(key);
writeFile(key, "del", filePath);
break;
}
}
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "del thread");
exportTheadList.add(exportThread);
exportThread.start();
}
for (Thread thread : exportTheadList) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = readCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("del total:" + totalCount + " speed:" + speedFormat.format(speed) + " useTime:"
+ (useTime / 1000.0) + "s");
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Counts keys matching the given pattern across master nodes and prints the total.
*/
public void printKeyLike(String pattern) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
jedis.close();
int count = 0;
long beginTime = System.currentTimeMillis();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String host = nodeInfo[1].split(":")[0];
int port = Integer.valueOf(nodeInfo[1].split(":")[1]);
String type = nodeInfo[2];
if (type.contains("master")) {
Jedis nodeCli = new Jedis(host, port);//connect to this master node
Set<String> keys = nodeCli.keys(pattern);
count += keys.size();
nodeCli.close();
}
}
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss [");
System.out.println(dfs.format(new Date()) + pattern + "] count->" + count + " useTime->"
+ ((System.currentTimeMillis() - beginTime)) + "ms ");
}
/**
* java -jar redis-cluster-util-jar-with-dependencies.jar h
*/
@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
// args = new String[] { "add-master", "172.20.16.87:29000", "172.20.16.88:29000", "172.20.16.89:29000" };
// args = new String[] { "add-master", "172.20.16.87:29000", "172.20.16.88:29000" };
//args = new String[] { "add-master", "172.20.16.87:29005" };
// args = new String[] { "analyze", "isKeyStat=true", "isCmdDetail=true", "showTop=20", "host=172.20.16.48",
// "port=5001", "monitorTime=5" };
//args = new String[] { "add-slave","172.20.16.87:29000->172.20.16.88:29000;172.20.16.87:29001->172.20.16.88:29001" };
//args = new String[] { "add-slave","172.20.16.87:29001->172.20.16.88:29001" };
//args = new String[] { "add-node", "172.20.16.91:29010", "172.20.16.89:29010" };
//args = new String[] { "bakup-node", "D://abc" };
// args = new String[] { "benchmark", "E:/bakup/jumei-app/show-dev-data-export.dat", "10" };
// args = new String[] { "check" };
// args = new String[] { "count" };
//args = new String[] { "create",
// "172.20.16.87:29000->172.20.16.88:29000;172.20.16.87:29001->172.20.16.88:29001;172.20.16.87:29002->172.20.16.88:29002" };
// args = new String[] { "del" };
// args = new String[] { "dels" };
// args = new String[] { "del-node", ":0" };
// args = new String[] { "del-node", "172.20.16.87:29000" };
// args = new String[] { "del-node", "172.20.16.88:29000;172.20.16.89:29000" };
//args = new String[] { "export", "*", "d:/show-dev-data-export.dat" };
//args = new String[] { "export-keys", "s_f_p_9186_86964530,s_f_p_7580_68233821", "d:/show-key-export.dat" };
// args = new String[] { "export-keys-file", "d:/keys.txt", "d:/show-key-export.dat" };
//args = new String[] { "fix-slot", "172.20.16.88:29000" };
// args = new String[] { "failover", "192.168.254.130:5001" };
// args = new String[] { "fix-slot-cover", "192.168.254.129:5001" };
// args = new String[] { "fix-slot-stable", "192.168.254.129:5001" };
// args = new String[] { "flush" };
// args = new String[] { "get" };
// args = new String[] { "import", "l,s", "d:/show-dev-data-export.dat" };
//args = new String[] { "import", "*", "E:/bakup/jumei-app/show-online-2016.2.3.dat" };
// args = new String[] { "import-mongodb", "*", "D:/bakup/jumeiapp-redis/show-imported-list.2016.1.11.dat" };
// args = new String[] { "info" };
// args = new String[] { "info", "output", "ops" };
// args = new String[] { "keys"};
// args = new String[] { "keysize"};
//args = new String[] { "monitor", "2" };
// args = new String[] { "raminfo", "*" };
//args = new String[] { "raminfo", "172.20.16.89:5001" };
//args = new String[] { "rubbish-del" };
//args = new String[] { "key-size-count", "u_id_set", "D:/" };
//args = new String[] { "reshard", "172.20.16.87:29000", "0-1024;1025-2048;4096-4096;4098-4301" };
//"reshard" "192.168.254.129:5000" "0-1024;1025-2048;4096-4096;4098-4301"
// args = new String[] { "set", "testkey", "testvalue" };
// args = new String[] { "h" };
//args = new String[] { "followRestore", "*", "D:/29000-u_f.dat" };
Runtime.getRuntime().addShutdownHook(new CleanWorkThread());
RedisClusterManager rcm = new RedisClusterManager();
long beginTime = System.currentTimeMillis();
if (args.length == 0) {
printHelp();
return;
}
String cmd = args[0];
if ("raminfo".equals(cmd) || "exporth".equals(cmd) || "exportHostKeys".equals(cmd)) {
} else {
connectCluser();
}
if (args.length > 0) {
if ("add-slave".equals(cmd)) {
if (args.length == 2) {
String[] master2slave = trim(args[1]).split(";");
for (int i = 0; i < master2slave.length; i++) {
String[] hostsInfo = master2slave[i].split("->");
if (hostsInfo.length == 2) {
rcm.addSlave(hostsInfo[0], hostsInfo[1], false);
} else {
System.out.println("请输入要添加的节点及主节点列表");
}
}
Thread.sleep(3000);//wait for the cluster configuration to propagate
rcm.check();
} else {
System.out.println("请输入主备关系:host1:port1->host2:port1;host1:port2->host2:port2;");
}
} else if ("bakup-node".equals(cmd)) {
if (args.length == 2) {
rcm.bakupNode(args[1]);
} else {
System.out.println("参数错误!");
}
} else if ("analyze".equals(cmd)) {
MonitorUtil.main(args);
} else if ("fansCount".equals(cmd)) {
if (args.length == 2) {
rcm.fansCount(args[1]);
} else {
System.out.println("fansCount D:/export.dat");
}
} else if ("exportHostKeys".equals(cmd)) {
if (args.length == 5) {
rcm.exportHostKeys(args[1], args[2], args[3], args[4]);
} else {
System.out.println("exportHostKeys ip port key1,key2 D:/export.dat");
}
} else if ("followDel".equals(cmd)) {
if (args.length == 3) {
rcm.followDel(args[1], args[2]);
} else {
System.out.println("followDel D:/u_f_uid_delete.dat");
}
} else if ("followAttentionDel".equals(cmd)) {
if (args.length == 3) {
rcm.followAttentionDel(args[1], args[2]);
} else {
System.out.println("followAttentionDel D:/u_a_uid_delete.dat");
}
} else if ("followRestore".equals(cmd)) {
if (args.length == 3) {
rcm.followRestore(args[1], args[2]);
} else {
System.out.println("followRestore D:/29000-u_f.dat");
}
} else if ("praiseDel".equals(cmd)) {
if (args.length == 3) {
rcm.praiseDel(args[1], args[2]);
} else {
System.out.println("praiseDel D:/input.dat");
}
} else if ("praiseCountDel".equals(cmd)) {
if (args.length == 3) {
rcm.praiseCountDel(args[1], args[2]);
} else {
System.out.println("praiseCountDel D:/input.dat");
}
} else if ("praiseCount".equals(cmd)) {
if (args.length == 3) {
rcm.praiseCount(args[1], args[2]);
} else {
System.out.println("praiseCount D:/export.dat");
}
} else if ("uaCheck".equals(cmd)) {
if (args.length == 2) {
rcm.uaCheck(args[1]);
} else {
System.out.println("fansCheck D:/export.dat");
}
} else if ("ufCheck".equals(cmd)) {
if (args.length == 2) {
rcm.ufCheck(args[1]);
} else {
System.out.println("fansCheck D:/export.dat");
}
} else if ("raminfo".equals(cmd)) {
if (args.length == 2) {
rcm.raminfo(args[1]);
} else {
connectCluser();
rcm.raminfo(null);
}
} else if ("rubbish-del".equals(cmd)) {
rcm.rubbishH5Del();
} else if ("create".equals(cmd)) {
StringBuffer sb = new StringBuffer();
for (int i = 1; i < args.length; i++) {
sb.append(args[i]);
}
String hostTrim = trim(sb.toString());
String[] master2slave = hostTrim.split(";");
rcm.create(rcm, master2slave);
Thread.sleep(3000);//wait for the cluster configuration to propagate
rcm.check();
} else if ("reshard".equals(cmd)) {
rcm.reshard(args);
} else if ("failover".equals(cmd)) {
String[] slaves = trim(args[1]).split(";");
for (String slave : slaves) {
rcm.failOver(slave);
}
Thread.sleep(3000);//wait for the cluster configuration to propagate
rcm.check();
} else if ("fix-slot-cover".equals(cmd)) {
rcm.fixSlotCover(args[1]);
Thread.sleep(3000);//wait for the cluster configuration to propagate
rcm.check();
} else if ("fix-slot-stable".equals(cmd)) {
rcm.fixSlotStable();
Thread.sleep(3000);//wait for the cluster configuration to propagate
rcm.check();
} else if ("add-master".equals(cmd)) {
if (args.length >= 2) {
rcm.addMaster(args);
Thread.sleep(3000);//wait for the cluster configuration to propagate
rcm.check();
} else {
System.out.println("请输入要添加的 主节点");
}
} else if ("dels".equals(cmd)) {
if (args.length == 3) {
rcm.dels(args[1], args[2]);
} else {
System.out.println("dels keyPattern D:/delKey.dat");
}
} else if ("counts".equals(cmd)) {
if (args.length == 1) {
System.out.println("请输入要统计的key前缀");
} else {
for (int i = 1; i < args.length; i++) {
rcm.countKeyLike(args[i]);
}
}
} else if ("del-node".equals(cmd)) {
if (args.length == 2) {
String[] hostsInfo = trim(args[1]).split(";");
for (int i = 0; i < hostsInfo.length; i++) {
rcm.delNode(hostsInfo[i]);
}
Thread.sleep(3000);//wait for the cluster configuration to propagate
rcm.check();
} else {
System.out.println("请输入要删除的节点:host1:port1;host2:port2;");
}
} else if ("querys".equals(cmd)) {
if (args.length == 1) {
rcm.queryKeyLike("");
} else {
for (int i = 1; i < args.length; i++) {
rcm.queryKeyLike(args[i]);
}
}
} else if ("export".equals(cmd)) {
if (args.length == 3) {
rcm.exportKeyPre(args[1], args[2]);
} else {
System.out.println("export keyPattern D:/export.dat");
}
} else if ("exporth".equals(cmd)) {
if (args.length == 3) {
rcm.exportKeyOneHost(args[1], args[2]);
} else {
System.out.println("export keyPattern D:/export.dat");
}
} else if ("export-keys".equals(cmd)) {
if (args.length == 3) {
rcm.exportKeys(args[1], args[2]);
} else {
System.out.println("export keys D:/export.dat");
}
} else if ("export-keys-file".equals(cmd)) {
if (args.length == 3) {
rcm.exportKeysFile(args[1], args[2]);
} else {
System.out.println("export keys D:/export.dat");
}
} else if ("import".equals(cmd)) {
if (args.length == 3) {
rcm.importKey(args[1], args[2]);
} else {
System.out.println("import keyPattern D:/import.dat");
}
} else if ("import-mongodb".equals(cmd)) {
if (args.length == 3) {
rcm.importMongodb(args[1], args[2]);
} else {
System.out.println("import keyPattern D:/import.dat");
}
} else if ("restoreUserHash".equals(cmd)) {
if (args.length == 2) {
rcm.restoreUserHash(args[1]);
} else {
System.out.println("restoreUserHash uid1,uid2");
}
} else if ("restoreShowPraise".equals(cmd)) {
if (args.length == 2) {
rcm.restoreShowPraise(args[1]);
} else {
System.out.println("restoreUserHash uid1,uid2");
}
} else if ("followRestoreByUids".equals(cmd)) {
if (args.length == 2) {
rcm.followRestoreByUids(args[1]);
} else {
System.out.println("followRestoreByUids uid1,uid2");
}
} else if ("set".equals(cmd) || "del".equals(cmd)) {
rcm.opt(args);
} else if ("get".equals(cmd)) {
rcm.opt(args);
} else if ("keys".equals(cmd)) {
if (args.length == 1) {
System.out.println("请输入要查詢的key前缀");
} else {
rcm.keys(args[1]);
}
} else if ("keysize".equals(cmd)) {
rcm.keySize();
} else if ("key-size-count".equals(cmd)) {
if (args.length == 3) {
rcm.keySizeCount(args[1], args[2]);
} else {
System.out.println("key-size-count u_id_set D:/");
}
} else if ("info".equals(cmd)) {
rcm.info(args);
} else if ("monitor".equals(cmd)) {
long sleepTime = 1000;
if (args.length == 2) {
sleepTime = Long.valueOf(args[1]) * 1000;
}
while (true) {
try {
rcm.monitor(args);
} catch (Throwable e) {
e.printStackTrace();
}
Thread.sleep(sleepTime);
}
} else if ("check".equals(cmd)) {
rcm.check();
} else if ("flush".equals(cmd)) {
rcm.flushall();
} else if ("h".equals(cmd) || "-h".equals(cmd) || "help".equals(cmd)) {
printHelp();
} else {
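// Fallback: treat the arguments as a raw redis command for a single key; compute the key's hash slot
// (CRC16 of the key modulo 16384, per the Redis Cluster spec), look up which node owns that slot via CLUSTER SLOTS,
// and shell out to redis-cli against that node.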
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
Map<Integer, String> slot2Host = new HashMap<Integer, String>();
List<Object> slotInfos = jedis.clusterSlots();
for (Object slotInfo : slotInfos) {
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
@SuppressWarnings("rawtypes")
List hostInfoList = (ArrayList) slotInfoList.get(2);
String host = new String((byte[]) hostInfoList.get(0));
int port = Integer.valueOf(hostInfoList.get(1).toString());
String hostInfo = host + ":" + port;
for (int i = (int) begin; i <= end; i++) {
slot2Host.put(i, hostInfo);
}
}
jedis.close();
String key = args[1];
int slot = JedisClusterCRC16.getCRC16(key) % 16384;
String[] hostInfo = null;
String hostPort = slot2Host.get(slot);
if (null != hostPort) {
hostInfo = hostPort.split(":");
cmd = "redis-cli -h " + hostInfo[0] + " -p " + hostInfo[1];
for (int i = 0; i < args.length; i++) {
cmd = cmd + " " + args[i];
}
executeCmd(cmd);
} else {
System.out.println("not cover solt:" + slot);
}
}
for (String arg : args) {
System.out.print(arg + " ");
}
System.out.println("finish use time " + ((System.currentTimeMillis() - beginTime)) + "ms");
}
}
public Map<String, AtomicLong> ramSizeCount = new ConcurrentHashMap<String, AtomicLong>();
public Map<String, AtomicLong> ramKeyCount = new ConcurrentHashMap<String, AtomicLong>();
public StringBuffer ramUnknowKey = new StringBuffer();
private void writeRamInfo() {
BufferedWriter raminfoUnknow = null;
try {
Iterator<Entry<String, AtomicLong>> it = ramKeyCount.entrySet().iterator();
System.out.println("key type size:" + ramKeyCount.size());
bw = new BufferedWriter(new FileWriter(SystemConf.confFileDir + "/raminfo.csv"));
while (it.hasNext()) {
Entry<String, AtomicLong> entry = it.next();
String info = entry.getKey() + "," + entry.getValue() + "," + ramSizeCount.get(entry.getKey()) + "\r\n";
bw.write(info);
}
raminfoUnknow = new BufferedWriter(new FileWriter(SystemConf.confFileDir + "/raminfoUnknowKey.txt", true));
ramUnknowKey.append("\r\n");
raminfoUnknow.write(ramUnknowKey.toString());
ramUnknowKey = new StringBuffer();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
try {
if (null != raminfoUnknow) {
raminfoUnknow.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
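// raminfo.csv rows are "keyPrefix,keyCount,totalSerializedBytes"; keys whose prefix could not be classified are
// appended to raminfoUnknowKey.txt instead.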
/**
* Aggregates memory usage statistics grouped by key prefix (business key class).
*/
public void raminfo(String node) {
List<Thread> exportTheadList = new ArrayList<Thread>();
if (null != node) {
String[] hostInfo = node.split(":");
Jedis jedis = new Jedis(hostInfo[0], Integer.valueOf(hostInfo[1]));
nodeAnalyze(exportTheadList, node, jedis);
} else {
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
if (null != node) {
if (!node.equals(entry.getKey())) {
continue;
}
}
final Jedis nodeCli = entry.getValue().getResource();
String info = entry.getValue().getResource().info();
if (null == node && info.contains("role:slave")) {//when no node is specified, only masters are analyzed
continue;
}
nodeAnalyze(exportTheadList, entry.getKey(), nodeCli);
}
}
for (Thread thread : exportTheadList) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
writeRamInfo();
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = readCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan total:" + totalCount + " speed:" + speedFormat.format(speed) + " useTime:"
+ (useTime / 1000.0) + "s");
}
private void nodeAnalyze(List<Thread> exportTheadList, String node, final Jedis nodeCli) {
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
int len = "serializedlength:".length();
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
String debug = nodeCli.debug(DebugParams.OBJECT(key));
int startIndex = debug.indexOf("serializedlength:");
int endIndex = debug.indexOf(" ", startIndex);
debug = debug.substring(startIndex + len, endIndex);
int i = 0;
//key = "s_c_p23926";//testkey
//key = "26228273praiseto101909365showid10290";//testkey
if (key.startsWith("rpcUserInfo")) {
key = "rpcUserInfo";
} else if (key.startsWith("s_url")) {
key = "s_url";
} else if (key.startsWith("live_link_")) {
key = "live_link_";
} else if (key.startsWith("historyappmessages")) {
key = "historyappmessages";
} else if (key.startsWith("historyadminmessages")) {
key = "historyadminmessages";
} else if (key.contains("praiseto") && key.contains("showid")) {
key = "praisetoshowid";
} else if (key.contains("followuser")) {
key = "followuser";
} else if (key.startsWith("user_relations")) {
key = "user_relations";
} else if (key.startsWith("user_relation_")) {
key = "user_relation_";
} else {
char c;
boolean isFindDecollator = false, isKnowBusiness = false;
for (; i < key.length(); i++) {
c = key.charAt(i);
if (key.charAt(i) == '_') {
isFindDecollator = true;
}
if (c == ':') {
isFindDecollator = true;
key = key.substring(0, i);
break;
} else if (isFindDecollator && i > 0 && c >= '0' && c <= '9') {
key = key.substring(0, i);
isKnowBusiness = true;
break;
}
}
if (!isKnowBusiness && !isFindDecollator) {//key has no recognizable business prefix
ramUnknowKey.append(key).append(',');
key = "unknown";
}
}
AtomicLong sizeCount = ramSizeCount.get(key);
if (null == sizeCount) {
sizeCount = new AtomicLong();
ramSizeCount.put(key, sizeCount);
}
sizeCount.addAndGet(Long.valueOf(debug));
AtomicLong keyCount = ramKeyCount.get(key);
if (null == keyCount) {
keyCount = new AtomicLong();
ramKeyCount.put(key, keyCount);
}
keyCount.incrementAndGet();
long scanCount = readCount.incrementAndGet();
if (scanCount % 100000 == 0) {
System.out.print("scan key size:" + scanCount);
writeRamInfo();
}
}
} while ((!"0".equals(cursor)));
}
}, node + "-raminfo");
exportTheadList.add(exportThread);
exportThread.start();
}
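// Size per key comes from DEBUG OBJECT's serializedlength field; note this is the serialized (RDB) size,
// not the exact in-memory footprint, so the per-prefix totals should be read as relative weights.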
/**
* Deletes leftover follow-feed garbage keys, exporting each key to a file before deletion.
*/
public void rubbishH5Del() {
final String[] exportKeyPre = "s_u_f_,s_f_l_,s_f_p_".split(",");
final String filePath = SystemConf.confFileDir + "/deleted-data.txt";
createExportFile(filePath);
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
List<Thread> exportTheadList = new ArrayList<Thread>();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
final Jedis nodeCli = entry.getValue().getResource();
String info = nodeCli.info();//reuse the connection obtained above instead of taking (and leaking) another pooled resource
if (info.contains("role:slave")) {
continue;
}
final String checkFiled = "longUrl";
Thread exportThread = new Thread(new Runnable() {
@Override
public void run() {
String cursor = "0";
do {
ScanResult<String> keys = nodeCli.scan(cursor, sp);
cursor = keys.getStringCursor();
List<String> result = keys.getResult();
for (String key : result) {
boolean isAttetionKey = false;
for (String keyExport : exportKeyPre) {
if ("*".equals(keyExport) || key.startsWith(keyExport)) {
exportKeys(key, filePath);
nodeCli.del(key);//delete follow-stream key
delCount.incrementAndGet();
isAttetionKey = true;
break;
}
}
if (!isAttetionKey) {
if (key.startsWith("rpcUserInfo")) {
key = "rpcUserInfo";
} else if (key.startsWith("s_url")) {
key = "s_url";
} else if (key.startsWith("live_link_")) {
key = "live_link_";
} else if (key.startsWith("historyappmessages")) {
key = "historyappmessages";
} else if (key.startsWith("historyadminmessages")) {
key = "historyadminmessages";
} else if (key.contains("praiseto") && key.contains("showid")) {
key = "praisetoshowid";
} else if (key.contains("followuser")) {
key = "followuser";
} else if (key.startsWith("user_relations")) {
key = "user_relations";
} else if (key.startsWith("user_relation_")) {
key = "user_relation_";
} else {
char c;
boolean isFindDecollator = false, isKnowBusiness = false;
int i = 0;
for (; i < key.length(); i++) {
c = key.charAt(i);
if (key.charAt(i) == '_') {
isFindDecollator = true;
}
if (isFindDecollator && i > 0 && c >= '0' && c <= '9') {
key = key.substring(0, i);
isKnowBusiness = true;
break;
}
}
if (!isKnowBusiness && !isFindDecollator) {//key has no recognizable business prefix
String keyType = nodeCli.type(key);
if ("hash".equals(keyType)) {
String value = nodeCli.hget(key, checkFiled);
if (null != value && value.contains("share/lv.jsp")) {
exportKeys(key, filePath);
nodeCli.del(key);
delCount.incrementAndGet();
}
} else if ("string".equals(keyType)) {
String value = nodeCli.get(key);
if (value.length() == 6) {
exportKeys(key, filePath);
nodeCli.del(key);
delCount.incrementAndGet();
}
}
}
}
}
long scanCount = readCount.incrementAndGet();
if (scanCount % 10000 == 0) {
System.out.println("scan key size:" + scanCount + " del key size:" + delCount.get());
}
}
} while ((!"0".equals(cursor)));
}
}, entry.getKey() + "del thread");
exportTheadList.add(exportThread);
exportThread.start();
}
for (Thread thread : exportTheadList) {
do {
if (thread.isAlive()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (thread.isAlive());
}
long useTime = System.currentTimeMillis() - writeBeginTime, totalCount = readCount.get();
float speed = (float) (totalCount / (useTime / 1000.0));
System.out.println("scan total:" + totalCount + " del key size:" + delCount.get() + " speed:"
+ speedFormat.format(speed) + " useTime:" + (useTime / 1000.0) + "s");
}
private void bakupNode(String filePath) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
String nodes = jedis.clusterNodes();
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(filePath));
for (String node : nodes.split("\n")) {
bw.write(node);
bw.write("\n");//write a real newline; the original "\\n" wrote a literal backslash-n into the backup file
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
if (null != bw) {
bw.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
private static String trim(String sb) {
String hostTrim = sb.toString().replace(" ", "");
hostTrim = hostTrim.replace("\r", "");
hostTrim = hostTrim.replace("\n", "");
hostTrim = hostTrim.replace("\\", "");
return hostTrim;
}
//"reshard", "172.20.162.87:29000", "0-1024;1025-2048"
@SuppressWarnings({ "rawtypes", "unchecked" })
private void reshard(String[] args) {
Jedis jedis = new Jedis(REDIS_HOST, REDIS_PORT);
Jedis destinationNode = connect(args[1]);
String[] destinationHostInfo = args[1].split(":");
String destinationHost = destinationHostInfo[0];
int destinationPort = Integer.valueOf(destinationHostInfo[1]);
String nodes = jedis.clusterNodes();
Map<String, String> host2NodeId = new HashMap<String, String>();
String destination_node_id = null;
List<Jedis> clusterHostList = new ArrayList<Jedis>();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String nodeId = nodeInfo[0];
String host = nodeInfo[1];
String type = nodeInfo[2];
String[] hostInfo = nodeInfo[1].split(":");
clusterHostList.add(new Jedis(hostInfo[0], Integer.parseInt(hostInfo[1])));
if (args[1].equals(host)) {
destination_node_id = nodeId;
if (type.contains("master")) {
destination_node_id = nodeId;
} else {
System.out.println(args[1] + " is not master !");
jedis.close();
return;
}
}
if (type.contains("master")) {
host2NodeId.put(host, nodeId);
}
}
if (null == destination_node_id) {
System.out.println(args[1] + " destination_node_id not found");
jedis.close();
return;
}
byte[] coverSlot = new byte[16384];
Map<Integer, Jedis> slot2Host = new HashMap<Integer, Jedis>();
Map<Integer, String> slot2NodeId = new HashMap<Integer, String>();
Map<String, Jedis> host2Jedis = new HashMap<String, Jedis>();
List<Object> slotInfos = jedis.clusterSlots();
for (Object slotInfo : slotInfos) {
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
List hostInfoList = (ArrayList) slotInfoList.get(2);
String host = new String((byte[]) hostInfoList.get(0));
int port = Integer.valueOf(hostInfoList.get(1).toString());
String hostInfo = host + ":" + port;
for (int i = (int) begin; i <= end; i++) {
coverSlot[i] = 1;
Jedis jedisHost = host2Jedis.get(hostInfo);
if (null == jedisHost) {
jedisHost = new Jedis(host, port);
host2Jedis.put(hostInfo, jedisHost);
}
slot2Host.put(i, jedisHost);
slot2NodeId.put(i, host2NodeId.get(hostInfo));
}
}
String[] slots2Migrating = args[2].split(";");
int slotBegin = 0, slotEnd = 0;
int timeout = 15000, migratCount = 10;
for (String slotRange : slots2Migrating) {
String[] slotInfo = slotRange.split("-");
slotBegin = Integer.valueOf(slotInfo[0]);
if (slotInfo.length == 1) {
slotEnd = slotBegin;
} else if (slotInfo.length == 2) {
slotEnd = Integer.valueOf(slotInfo[1]);
} else {
System.out.println("参数错误!");
jedis.close();
return;
}
System.out.println("migrate slot " + slotRange + " ...");
for (int slot = slotBegin; slot <= slotEnd; slot++) {
Jedis sourceNode = slot2Host.get(slot);
String source_node_id = slot2NodeId.get(slot);
if (null == source_node_id) {
System.out.println(slot + " source_node_id not found");
continue;
}
if (source_node_id.equals(destination_node_id)) {//same node, nothing to migrate
continue;
}
destinationNode.clusterSetSlotImporting(slot, source_node_id);//step 1, must run before step 2
sourceNode.clusterSetSlotMigrating(slot, destination_node_id);//step 2
List<String> keysInSlot;
do {
keysInSlot = sourceNode.clusterGetKeysInSlot(slot, migratCount);
for (String key : keysInSlot) {
try {
sourceNode.migrate(destinationHost, destinationPort, key, 0, timeout);////step 3
} catch (RuntimeException e) {
String msg = e.getMessage();
e.printStackTrace();
if (msg.contains("BUSYKEY Target key name already exists")) {
System.out.println(key + " BUSYKEY Target key name already exists");
continue;
}
System.out.println("迁移终止,当前slot:" + slot + " key:" + key);
return;
}
}
} while (keysInSlot.size() != 0);
try {
//if the destination node has turned into a slave, the slot would be lost
String checkNodes = destinationNode.clusterNodes();
if (!checkNodes.contains("myself,master")) {
System.out.println("目标节点不是主节点,迁移终止,当前slot位置:" + slot);
return;
}
sourceNode.clusterSetSlotNode(slot, destination_node_id);//step 4 source or destination
destinationNode.clusterSetSlotNode(slot, destination_node_id);//
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (msg.contains("I still hold keys")) {
slot--; //under heavy writes new keys may have arrived in this slot; retry it, otherwise data would be lost
System.out.println(slot + ",I still hold keys,try again");
continue;
} else {
e.printStackTrace();
System.out.println("有节点失败,迁移终止,当前slot位置:" + slot);
return;
}
} catch (Throwable e) {
e.printStackTrace();
System.out.println("有节点失败,迁移终止,当前slot位置:" + slot);
return;
}
for (Jedis notify : clusterHostList) {
try {
notify.clusterSetSlotNode(slot, destination_node_id);
} catch (Throwable e) {
e.printStackTrace();
System.out.println("有节点失败,迁移终止,当前slot位置:" + slot);
return;
}
}
//strong consistency is required here: if either of the two migrating nodes fails before the slot info is synced everywhere, the slot could be lost
for (Jedis notify : clusterHostList) {
int waitCount = 0;
boolean isSync = false;
String nodeCheck = null;
do {
try {
nodeCheck = notify.clusterInfo();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
e.printStackTrace();
System.out.println("有节点失败,迁移终止,当前slot位置:" + slot);
}
isSync = nodeCheck.contains("cluster_slots_ok:16384");
if (!isSync) {
waitCount++;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("wait conf sync " + waitCount + " ...");
}
} while (!isSync);
}
if (slot % 1 == 0) {//migrating ~50 million keys is slow, so log progress for every slot
System.out.println("migrate slot " + slot + " done");
}
}
System.out.println("migrate slot " + slotRange + " done");
}
destinationNode.close();
jedis.close();
}
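/**
 * Creates a cluster from a "master->slave;master2->slave2;..." list: CLUSTER MEET all nodes,
 * spread the 16384 slots evenly over the masters (the last master takes the remainder),
 * then attach each slave to its master via addSlave().
 */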
private void create(RedisClusterManager rcm, String[] master2slave) {
String[] masterHost = master2slave[0].split("->");
String[] hostInfo = masterHost[0].split(":");
String host = hostInfo[0];
int port = Integer.parseInt(hostInfo[1]);
//meet
for (int i = 0; i < master2slave.length; i++) {
String[] hostsInfo = master2slave[i].split("->");
if (hostsInfo.length == 2) {
Jedis clusterNode = connect(hostsInfo[0]);
Jedis slaveNode = connect(hostsInfo[1]);
try {
clusterNode.clusterMeet(host, port);
clusterNode.close();
slaveNode.clusterMeet(host, port);
slaveNode.close();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
System.out.println(hostsInfo[1] + " clusterMeet connect error!");
}
} else {
System.out.println("请输入要添加的节点及主节点列表");
}
}
System.out.println("cluster send meet all!");
//set slot
int slot = 16384 / master2slave.length;
int slotIndex = 0;
for (int i = 0; i < master2slave.length; i++) {
String[] hostsInfo = master2slave[i].split("->");
Jedis clusterNode = connect(hostsInfo[0]);
int thisBegin = slotIndex;
for (; slotIndex <= (i + 1) * slot; slotIndex++) {
try {
clusterNode.clusterAddSlots(slotIndex);
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (msg.contains("is already busy")) {
} else {
e.printStackTrace();
}
} catch (redis.clients.jedis.exceptions.JedisConnectionException e2) {
System.out.println(hostsInfo[0] + " clusterAddSlots connect error!");
}
}
if (i == master2slave.length - 1) {//the last node takes all remaining slots
for (; slotIndex < 16384; slotIndex++) {
try {
clusterNode.clusterAddSlots(slotIndex);
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (msg.contains("is already busy")) {
} else {
e.printStackTrace();
}
} catch (redis.clients.jedis.exceptions.JedisConnectionException e2) {
System.out.println(hostsInfo[0] + " clusterAddSlots connect error!");
}
}
}
System.out.println(hostsInfo[0] + " set slots " + thisBegin + "-" + (slotIndex - 1));
clusterNode.close();
}
//set slave
for (int i = 0; i < master2slave.length; i++) {
String[] hostsInfo = master2slave[i].split("->");
rcm.addSlave(hostsInfo[0], hostsInfo[1], true);
}
}
private Jedis connect(String hostPort) {
String[] hostInfo = hostPort.split(":");
return new Jedis(hostInfo[0], Integer.parseInt(hostInfo[1]));
}
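/**
 * Promotes the given slave with CLUSTER FAILOVER, retrying every 500ms until CLUSTER NODES
 * no longer reports "myself,slave" for that node.
 */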
private void failOver(String slaveNode) throws Exception {
String[] masterHostInfo = slaveNode.split(":");
Jedis fixNode = new Jedis(masterHostInfo[0], Integer.parseInt(masterHostInfo[1]));
try {
String clusterNode;
int tryCount = 0;
do {
fixNode.clusterFailover();//does not always take effect, hence the retry loop
Thread.sleep(500);
tryCount++;
clusterNode = fixNode.clusterNodes();
if (tryCount > 1) {
System.out.println(slaveNode + " tryCount:" + tryCount);
}
} while (clusterNode.contains("myself,slave"));//loop until the failover has really succeeded
System.out.println(slaveNode + " failover success!");
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (msg.contains("CLUSTER FAILOVER to a slave")) {
System.out.println(slaveNode + " is master, You should send CLUSTER FAILOVER to a slave");
}
} catch (redis.clients.jedis.exceptions.JedisConnectionException e2) {
String msg = e2.getMessage();
if (msg.contains("connect timed out")) {
System.out.println(slaveNode + " : connect timed out");
}
}
fixNode.close();
}
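/**
 * Clears any importing/migrating state from every covered slot (CLUSTER SETSLOT ... STABLE),
 * e.g. to clean up after an interrupted reshard.
 */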
private void fixSlotStable() {
Jedis fixNode = new Jedis(REDIS_HOST, REDIS_PORT);
byte[] coverSlot = new byte[16384];
List<Object> slotInfos = fixNode.clusterSlots();
for (Object slotInfo : slotInfos) {//walk every covered slot range
@SuppressWarnings("unchecked")
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
for (int i = (int) begin; i <= end; i++) {
coverSlot[i] = 1;
fixNode.clusterSetSlotStable(i);//Clear any importing / migrating state from hash slot.
}
}
fixNode.close();//close only after the last CLUSTER SETSLOT call (it was previously closed before the loop that still used it)
}
/**
 * Use the given master node to take over slots that are not covered by any node
* @param masterNode
*/
@SuppressWarnings("unchecked")
private void fixSlotCover(String masterNode) {
String[] masterHostInfo = masterNode.split(":");
Jedis fixNode = new Jedis(masterHostInfo[0], Integer.parseInt(masterHostInfo[1]));
byte[] coverSlot = new byte[16384];
List<Object> slotInfos = fixNode.clusterSlots();
for (Object slotInfo : slotInfos) {//mark the slots that are already covered
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
// String host = new String((byte[]) hostInfo.get(0));
// long port = (long) hostInfo.get(1);
for (int i = (int) begin; i <= end; i++) {
coverSlot[i] = 1;
//fixNode.clusterSetSlotStable(i);//Clear any importing / migrating state from hash slot.
}
}
int begin = -1;
for (int i = 0; i < 16384; i++) {
if (coverSlot[i] == 0) {
fixNode.clusterAddSlots(i);
}
if (coverSlot[i] == 0 && begin == -1) {
begin = i;
} else if ((coverSlot[i] == 1 && begin > -1) || i == 16384) {
System.out.println("cluster_slots_fixed:" + begin + "-" + i);
begin = -1;
}
}
fixNode.close();
}
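/**
 * Simple write-benchmark worker: SETs "<key>_<i>" with an empty value for i in [offset, dataCount),
 * printing throughput every 5000 operations and counting errors so node restarts can be observed.
 */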
static class TestClass implements Runnable {
List<JSONObject> benckmarkData = new ArrayList<JSONObject>();
private int threadNum;
private String key;
private long offset;
private long dataCount;
public TestClass(int threadNum, String key, String value, long offset, long dataCount) {
this.key = key;
this.offset = offset;
this.dataCount = dataCount;
this.threadNum = threadNum;
}
public void run() {
long beginTime = System.currentTimeMillis(), lastCountTime = System.currentTimeMillis();
long lastCount = 0;
long lastBreakTime = 0;
int errorCount = 0;
for (long i = offset; i < dataCount; i++) {
try {
cluster.set(key + "_" + i, "");//a node may be down while the benchmark runs
} catch (Exception e) {
errorCount++;
if (lastBreakTime == 0) {
lastBreakTime = System.currentTimeMillis();
}
System.out.println("errorCount:" + errorCount);
}
if (lastBreakTime > 0) {
System.out
.println(threadNum + "reconnect use time:" + (System.currentTimeMillis() - lastBreakTime));
lastBreakTime = 0;
}
if (i % 5000 == 0) {
long useTime = System.currentTimeMillis() - lastCountTime;
System.out.println(threadNum + " set total:" + i + " speed:"
+ ((i - lastCount) / (useTime / 1000.0)));
lastCountTime = System.currentTimeMillis();
lastCount = i;
}
}
long useTime = System.currentTimeMillis() - beginTime;
System.out.println(threadNum + " set use time:" + useTime + " speed:"
+ ((dataCount - offset) / (useTime / 1000.0)));
}
}
private static void printHelp() {
System.out.println("java -jar redis-cluster-util-jar-with-dependencies.jar arg1 arg2 ...");
System.out.println("add-master \t:[host:port;host2:port2]add master list");
System.out.println("add-slave \t:[maser->slave;master2->slave2;...]master->slave");
System.out.println("analyze \t:" + MonitorUtil.helpInfo);
System.out.println("bakup-node \t:[file path]file path to save");
System.out
.println("benchmark \t:java -cp redis-cluster-util-jar-with-dependencies.jar com.jumei.util.Benchmark key value offset limit threadCount [all|set|get]");
System.out.println("check \t:check cluster status");
System.out.println("count \t:[keyPattern] count key count use keyPattern");
System.out.println("create \t:[maser->slave;master2->slave2;...] create cluster");
System.out.println("del \t:[key] del one key");
System.out.println("dels \t:[keyPattern][delKeyFileSavePath] del use keyPattern");
System.out.println("del-node \t:[host:port]");
System.out.println("del-node-id \t:[node-id]del node use id");
System.out.println("export \t:[keyPattern][outputFilePath] use * to export all");
System.out.println("exporth \t:[keyPattern][outputFilePath] export one host data, use * to export all");
System.out.println("export-keys \t:[key1,key2][outputFilePath]");
System.out.println("export-keys-file \t:[input keys file][outputFilePath]");
System.out.println("failover \t:[host:port;host2:port2] slave failover");
System.out.println("fix-slot-cover \t:[host:port] use one node to fix uncovered slot ");
System.out.println("fix-slot-stable \t:clear any importing / migrating state from hash slot");
System.out.println("flush \t:use flushall to clean cluster all data (be careful!)");
System.out.println("get \t:[key] get a string type value");
System.out
.println("import \t:[keyPattern][importFilePath] import if key not contains but list use mrege, use * to import all");
System.out.println("info \t:(ops,input,output,ram) query db info ");
System.out.println("keys \t:query use keyPattern");
System.out.println("keysize :count cluster all key");
System.out.println("monitor :[sleep second] monitor cluster status");
System.out.println("querys \t:query use pattern");
System.out.println("reshard \t:[host:port](master) [1-1024;1025-2048](slot range)");
System.out.println("raminfo \t:[host:port]default all node raminfo analysis");
System.out.println("set \t:[key][value] set a string type value");
System.out.println("others \t:use redis-cli to execute others command(linux only)");
}
/**
 * Execute a shell command
* @param cmd
*/
public static void executeCmd(String cmd) {
if (null != cmd) {
System.out.println("exec cmd: " + cmd);
if (!SystemConf.isWindos) {
Runtime rt = Runtime.getRuntime();
try {
long beginTime = System.currentTimeMillis();
Process process = rt.exec(cmd);
StreamGobbler errorGobbler = new StreamGobbler(process.getErrorStream(), "ERROR");
StreamGobbler outputGobbler = new StreamGobbler(process.getInputStream(), "INFO");
errorGobbler.start();
outputGobbler.start();
System.out.println(cmd + " useTime:" + (System.currentTimeMillis() - beginTime));
while (errorGobbler.isAlive() || outputGobbler.isAlive()) {
Thread.sleep(1);
}
process.waitFor();
process.destroy();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
private void flushall() {
Iterator<Entry<String, JedisPool>> nodes = cluster.getClusterNodes().entrySet().iterator();
while (nodes.hasNext()) {
Entry<String, JedisPool> entry = nodes.next();
Jedis jedis = entry.getValue().getResource();
try {
jedis.flushAll();
System.out.println(entry.getKey() + " flushAll success");
} catch (Exception e) {
String msg = e.getMessage();
if (msg.contains("Read timed out")) {
System.out.println(entry.getKey() + " flushAll fail");
} else if (msg.contains("READONLY")) {//slave
} else {
e.printStackTrace();
}
}
}
}
@SuppressWarnings("unchecked")
private void check() {
Jedis clusterMaster = new Jedis(REDIS_HOST, REDIS_PORT, 10000);
String nodes = clusterMaster.clusterNodes();
Map<String, String> slave2host = new TreeMap<String, String>();
Map<String, String> host2slave = new TreeMap<String, String>();
Map<String, String> master2host = new TreeMap<String, String>();
Map<String, String> host2master = new TreeMap<String, String>();
Map<String, String> master2slave = new TreeMap<String, String>();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String type = nodeInfo[2];
if (type.contains("master")) {
master2host.put(nodeInfo[0], nodeInfo[1]);
host2master.put(nodeInfo[1], nodeInfo[0]);
master2slave.put(nodeInfo[1], "warn");
}
}
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String type = nodeInfo[2];
if (type.contains("slave")) {
slave2host.put(nodeInfo[0], nodeInfo[1]);
host2slave.put(nodeInfo[1], nodeInfo[0]);
String masterHost = master2host.get(nodeInfo[3]);
if (null != masterHost) {
master2slave.put(masterHost, nodeInfo[1]);
} else {
System.out.println("master not found:" + nodeInfo[1]);
}
}
}
Iterator<Entry<String, String>> it = master2slave.entrySet().iterator();
StringBuffer slaveCheck = new StringBuffer("==== slave status check info ====");
boolean slaveCheckErrorFind = false;
while (it.hasNext()) {
Entry<String, String> entry = it.next();
String key = entry.getKey();
String value = entry.getValue();
if ("warn".equals(value)) {
slaveCheckErrorFind = true;
slaveCheck.append("\r\n" + entry.getKey() + " no slave");
continue;
}
String[] masterHostInfo = key.split(":");
String[] slaveHostInfo = value.split(":");
if (masterHostInfo[0].equals(slaveHostInfo[0]) || !masterHostInfo[1].equals(slaveHostInfo[1])) {//master and slave on the same host, or their ports do not match
slaveCheck.append("\r\n" + entry.getKey() + " slave ");
if (":0".equals(value)) {
slaveCheck.append("disconnected");
} else {
slaveCheck.append(value + " warn");
}
slaveCheckErrorFind = true;
} else {
slaveCheck.append("\r\n" + entry.getKey() + "->" + value);
}
}
if (slaveCheckErrorFind) {
slaveCheck.insert("==== slave status check info ====".length(), "error");
} else {
slaveCheck.insert("==== slave status check info ====".length(), "ok");
}
System.out.println(slaveCheck);
StringBuffer nodeFailCheck = new StringBuffer("==== node status check info ====");
boolean failCheckFind = false;
for (String node : nodes.split("\n")) {
if (node.contains("fail") || node.contains(":0")) {
nodeFailCheck.append("\r\n" + node);
failCheckFind = true;
}
}
if (!failCheckFind) {
nodeFailCheck.append("ok");
}
System.out.println(nodeFailCheck);
String clusterInf = clusterMaster.clusterInfo();
if (clusterInf.contains("cluster_state:ok") && clusterInf.contains("cluster_slots_ok:16384")) {
System.out.println("==== cluster info ====OK");
} else {
System.out.println("==== cluster info ====");
List<Object> slotInfos = clusterMaster.clusterSlots();
byte[] coverSlot = new byte[16384];
for (Object slotInfo : slotInfos) {//collect the covered slots
List<Object> slotInfoList = (List<Object>) slotInfo;
long begin = (Long) slotInfoList.get(0);
long end = (Long) slotInfoList.get(1);
for (int i = (int) begin; i <= end; i++) {
coverSlot[i] = 1;
}
}
int begin = -1;
for (int i = 0; i < 16384; i++) {
/*if (coverSlot[i] == 0) {
System.out.println("cluster_slots_lost:" + i);
}*/
if (coverSlot[i] == 0 && begin == -1) {
if (i == 16383 || coverSlot[i + 1] == 1) {//only a single slot is lost
System.out.println("cluster_slots_lost:" + i);
} else {
begin = i;
}
} else if ((coverSlot[i] == 1 && begin > -1)) {
System.out.println("cluster_slots_lost_range:" + begin + "-" + i);
begin = -1;
}
}
System.out.println(clusterInf);
}
clusterMaster.close();
}
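/**
 * Adds brand-new, empty masters to the cluster: verifies each node only knows itself (otherwise it
 * prints the commands needed to wipe the old state), then CLUSTER MEETs it and waits until the
 * existing cluster reports the new node.
 */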
private void addMaster(String[] args) {
Jedis clusterNode = new Jedis(REDIS_HOST, REDIS_PORT);
List<Jedis> addHostList = new ArrayList<Jedis>();
String nodes = null;
String[] addMasterNodes = trim(args[1]).split(";");
for (String addMasterNode : addMasterNodes) {
String[] addHostInfo = addMasterNode.split(":");
Jedis addNode = new Jedis(addHostInfo[0], Integer.parseInt(addHostInfo[1]));
try {
nodes = addNode.clusterNodes();
addHostList.add(addNode);
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
System.out.println(addMasterNode + " connect error!");
continue;
}
int nodeCount = 0;
String addNodeId = null;
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
if (node.contains("myself")) {
addNodeId = nodeInfo[0];
}
nodeCount++;
}
if (null == addNodeId) {
System.out.println("nodeId not found!");
return;
}
if (nodeCount > 1) {
System.out.println(addMasterNode + " is not a new node, use this command to remove the old node info:");
System.out.println("cd /home/redis/" + addHostInfo[1]
+ " && rm -f dump.rdb appendonly.aof nodes.conf redis.log && service redis-node-"
+ addHostInfo[1] + " restart");
return;
}
}
for (Jedis addHost : addHostList) {
boolean meetSeccuss = false;
addHost.clusterMeet(REDIS_HOST, REDIS_PORT);
while (!meetSeccuss) {
try {
Thread.sleep(100);//the meet typically needs about 100ms to propagate
} catch (InterruptedException e) {
e.printStackTrace();
}
nodes = clusterNode.clusterNodes();
if (nodes.contains(getJedisHostInfo(addHost))) {//check whether the cluster already knows this node
meetSeccuss = true;
}
if (!meetSeccuss) {
System.out.println(getJedisHostInfo(addHost) + " wait meet to succeed ...");
} else {
System.out.println(getJedisHostInfo(addHost) + " add master succeeded!");
}
}
}
clusterNode.close();
}
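/**
 * Attaches slaveNode to masterNode: looks up the master's node id, CLUSTER MEETs the slave,
 * issues CLUSTER REPLICATE, and waits until every node in the cluster sees the new slave.
 */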
private void addSlave(String masterNode, String slaveNode, boolean isCreateCluster) {
String[] masterHostInfo = masterNode.split(":");
Jedis master = new Jedis(masterHostInfo[0], Integer.parseInt(masterHostInfo[1]));
String nodes = master.clusterNodes();
String masterNodeId = null;
List<Jedis> clusterHostList = new ArrayList<Jedis>();
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String[] hostInfo = nodeInfo[1].split(":");
if (masterNode.equals(nodeInfo[1])) {
masterNodeId = nodeInfo[0];
}
int port = Integer.parseInt(hostInfo[1]);
if (port > 0) {
clusterHostList.add(new Jedis(hostInfo[0], port));
} else {
//System.out.println("not connected:" + node);//可能存在没有连上的节点
}
}
String[] addHostInfo = slaveNode.split(":");
Jedis slave = new Jedis(addHostInfo[0], Integer.parseInt(addHostInfo[1]));
nodes = slave.clusterNodes();
int nodeCount = 0;
String addNodeId = null;
for (String node : nodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
if (node.contains("myself")) {
addNodeId = nodeInfo[0];
}
nodeCount++;
}
if (null == addNodeId) {
System.out.println("nodeId not found");
slave.close();
master.close();
return;
}
if (nodeCount > 1 && !isCreateCluster) {
System.out.println(slaveNode + " is not a new node, use this command to remove the old node info:");
System.out.println("cd /home/redis/" + addHostInfo[1]
+ " && rm -f dump.rdb appendonly.aof nodes.conf redis.log && service redis-node-" + addHostInfo[1]
+ " restart");
slave.close();
master.close();
return;
}
if (null == masterNodeId) {
System.out.println("not found master node with host:" + masterNode);
slave.close();
master.close();
return;
}
slave.clusterMeet(masterHostInfo[0], Integer.parseInt(masterHostInfo[1]));
boolean meetSeccuss = false;
while (!meetSeccuss) {
nodes = slave.clusterNodes();
if (nodes.contains(masterNodeId)) {
meetSeccuss = true;
}
if (!meetSeccuss) {
System.out.println(masterNode + " wait slave meet success ...");
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
try {
slave.clusterReplicate(masterNodeId);
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
String print = "only replicate a master, not a slave";
if (msg.contains(print)) {
System.out.println(masterNode + " " + print);
} else {
e.printStackTrace();
}
}
//check
for (Jedis host : clusterHostList) {
boolean isAddSuccess = false;
do {
String checkNodes = null;
try {
checkNodes = host.clusterNodes();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
System.out.println(getJedisHostInfo(host) + " check slave connect error");
continue;
}
for (String node : checkNodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
if (slaveNode.equals(nodeInfo[1])) {
isAddSuccess = true;
break;
}
}
if (!isAddSuccess) {
System.out.println(getJedisHostInfo(host) + " wait nodes.conf sync ...");
try {
Thread.sleep(300);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (!isAddSuccess);
}
master.close();
slave.close();
}
/**
 * Deletes a slave node or a master node that owns no slots; failed nodes can also be deleted
* @param delNode
*/
@SuppressWarnings("unchecked")
private void delNode(String delNode) {
Jedis checkMaster = new Jedis(REDIS_HOST, REDIS_PORT);
String clusterNodes = checkMaster.clusterNodes();
if (!clusterNodes.contains(delNode)) {
checkMaster.close();
System.out.println(delNode + " not in cluster!");
return;
}
if (!":0".equals(delNode)) {//掉线主机直接删除 ,TODO有bug
List<Object> slotInfos = checkMaster.clusterSlots();
for (Object slotInfo : slotInfos) {//check whether the node to delete still owns slots
List<Object> slotInfoList = (List<Object>) slotInfo;
for (int i = 2; i < slotInfoList.size(); i++) {
List<Object> slotHostInfo = (List<Object>) slotInfoList.get(i);
String host = new String((byte[]) slotHostInfo.get(0));
long port = (Long) slotHostInfo.get(1);
String hostPort = host + ":" + port;
String isMasterCheck = hostPort + " master";
if ((hostPort.equals(delNode) && clusterNodes.contains(isMasterCheck))) {//a master that still owns slots cannot be deleted
System.out.println(hostPort + " del fail contain slot " + slotInfoList.get(0) + "-"
+ slotInfoList.get(1));
checkMaster.close();
return;
}
}
}
}
List<String> delNodeIds = new ArrayList<String>();//offline nodes show up as ":0", so several node ids may match
List<Jedis> clusterHostList = new ArrayList<Jedis>();
for (String node : clusterNodes.split("\n")) {
String[] nodeInfo = node.split("\\s+");
String[] hostInfo = nodeInfo[1].split(":");
if (delNode.equals(nodeInfo[1])) {
delNodeIds.add(nodeInfo[0]);
} else {
clusterHostList.add(new Jedis(hostInfo[0], Integer.parseInt(hostInfo[1])));
}
}
if (delNodeIds.size() > 0) {
for (String delNodeId : delNodeIds) {
for (Jedis host : clusterHostList) {
String hostInfo = getJedisHostInfo(host);
try {
host.clusterForget(delNodeId);
System.out.println(hostInfo + " send forget success");
} catch (redis.clients.jedis.exceptions.JedisDataException e) {
String msg = e.getMessage();
if (null != msg && msg.contains("Unknown node")) {
System.out.println(hostInfo + " not found");
} else {
System.out.println(hostInfo + " send forget fail");
e.printStackTrace();
}
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
String msg = e.getMessage();
if (null != msg && msg.contains("Connection refused")) {
System.out.println(hostInfo + " cannot be reached, please manually clear this node's entry from its nodes.conf, otherwise it will bring the removed node back when it rejoins the cluster!");
} else {
e.printStackTrace();
}
}
}
//check
for (Jedis host : clusterHostList) {
boolean isDelSuccess = false;
while (!isDelSuccess) {
String checkNodes = checkMaster.clusterNodes();
if (checkNodes.contains(delNodeId)) {
System.out.println(getJedisHostInfo(host) + " wait delete success ...");
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
isDelSuccess = true;
}
}
}
String[] delHostInfo = delNode.split(":");
int port = Integer.parseInt(delHostInfo[1]);
if (port > 0) {
try {
Jedis jedis = new Jedis(delHostInfo[0], Integer.parseInt(delHostInfo[1]));
jedis.shutdown();
System.out.println(delNode + " has shutdown!");
jedis.close();
} catch (redis.clients.jedis.exceptions.JedisConnectionException e) {
System.out.println(delNode + " cannot be connected, please shut it down manually!");
}
}
System.out.println(delNode + " delete success, please remove nodes.conf file!");
}
}
checkMaster.close();
}
private String getJedisHostInfo(Jedis host) {
return host.getClient().getHost() + ":" + host.getClient().getPort();
}
private void opt(String[] args) {
JedisCluster jedisCluster;
Set<HostAndPort> jedisClusterNodes;
JedisPoolConfig pool;
jedisClusterNodes = new HashSet<HostAndPort>();
jedisClusterNodes.add(new HostAndPort(REDIS_HOST, REDIS_PORT));
pool = new JedisPoolConfig();
pool.setMaxTotal(100);
jedisCluster = new JedisCluster(jedisClusterNodes, pool);
long beginTime = System.currentTimeMillis();
if ("del".equals(args[0])) {
for (int i = 1; i < args.length; i++) {
jedisCluster.del(args[i]);
}
} else if ("set".equals(args[0])) {
jedisCluster.set(args[1], args[2]);
} else if ("get".equals(args[0])) {
for (int i = 1; i < args.length; i++) {
System.out.println(args[i] + "->" + jedisCluster.get(args[i]));
}
}
System.out.println("opt useTime->" + ((System.currentTimeMillis() - beginTime)) + "ms ");
try {
jedisCluster.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public Jedis getOldRedis(String key) {
return oldRedisMap.get(JedisClusterCRC16.getCRC16(key) % 16384);
}
/**
 * Restore the attention (following) list and the fans (followers) list
*/
private void followRestoreByUids(String uids) {
String[] uidsArray = uids.split(",");
String KEY_USER_ATTENTION_ZSET = "u_a_";
String KEY_USER_FANS_ZSET = "u_f_";
for (String uid : uidsArray) {
if (uid.length() == 0) {
continue;//invalid uid
}
try {
//restore the attention list
String keyAttention = KEY_USER_ATTENTION_ZSET + uid;//this user's attention list
int slot = JedisClusterCRC16.getCRC16(keyAttention) % 16384;
Jedis oldJedis = oldRedisMap.get(slot);
Set<Tuple> attentionUids = oldJedis.zrangeWithScores(keyAttention, 0, -1);
if (attentionUids != null && attentionUids.size() > 0) {
for (Tuple t : attentionUids) {
String attentionUid = t.getElement(); //uid of the followed user
double score = t.getScore();
cluster.zadd(keyAttention, score, attentionUid);
cluster.zadd(KEY_USER_FANS_ZSET + attentionUid, score, uid);//add this user into the followed user's fans list
}
}
//restore the fans list
String keyFans = KEY_USER_FANS_ZSET + uid;//this user's fans list
int slot2222 = JedisClusterCRC16.getCRC16(keyFans) % 16384;
Jedis oldJedis2222 = oldRedisMap.get(slot2222);
Set<Tuple> fansUids = oldJedis2222.zrangeWithScores(keyFans, 0, -1);
if (fansUids != null && fansUids.size() > 0) {
for (Tuple t : fansUids) {
String fansUid = t.getElement(); //uid of the fan
double score = t.getScore();
cluster.zadd(keyFans, score, fansUid);
cluster.zadd(KEY_USER_ATTENTION_ZSET + fansUid, score, uid);//add this user into the fan's attention list
}
}
} catch (Exception e) {
System.out.println("followRestoreByUids 异常,当前uid:" + uid);
e.printStackTrace();
}
}
}
/**
 * Restore praise (like) data of posts
*/
private void restoreShowPraise(String uids) {
String[] uidsArray = uids.split(",");
String KEY_USER_SHOW_ZSET = "u_s_";//normal posts
String KEY_USER_SHOW_VIDEO_ZSET = "u_s_v_";//video posts
String KEY_USER_COUNSEL_SHOW_ZSET = "u_counsel_s_";//column posts
String KEY_SHOW_PRAISE_SET = "s_p_";//set of users who praised a post
for (String uid : uidsArray) {
if (uid.length() == 0) {//invalid uid
continue;
}
try {
String key = KEY_USER_SHOW_ZSET + uid;
//iterate over normal posts
Set<String> showIdsNormal = oldRedisMap.get(JedisClusterCRC16.getCRC16(key) % 16384).zrange(key, 0, -1);
if (showIdsNormal != null && showIdsNormal.size() > 0) {
for (String showId : showIdsNormal) {
Set<Tuple> tuplesPraise = getOldRedis(KEY_SHOW_PRAISE_SET + showId).zrangeWithScores(
KEY_SHOW_PRAISE_SET + showId, 0, -1);
if (tuplesPraise != null && tuplesPraise.size() > 0) {
for (Tuple t : tuplesPraise) {
String praiseUserId = t.getElement();
double praiseTime = t.getScore();
cluster.zadd(KEY_SHOW_PRAISE_SET + showId, praiseTime, praiseUserId);
}
}
}
}
//iterate over video posts
Set<String> showIdsVideo = getOldRedis(KEY_USER_SHOW_VIDEO_ZSET + uid).zrange(
KEY_USER_SHOW_VIDEO_ZSET + uid, 0, -1);
if (showIdsVideo != null && showIdsVideo.size() > 0) {
for (String showId : showIdsVideo) {
Set<Tuple> tuplesPraise = getOldRedis(KEY_SHOW_PRAISE_SET + showId).zrangeWithScores(
KEY_SHOW_PRAISE_SET + showId, 0, -1);
if (tuplesPraise != null && tuplesPraise.size() > 0) {
for (Tuple t : tuplesPraise) {
String praiseUserId = t.getElement();
double praiseTime = t.getScore();
cluster.zadd(KEY_SHOW_PRAISE_SET + showId, praiseTime, praiseUserId);
}
}
}
}
//iterate over column posts
Set<String> showIdsCounsel = getOldRedis(KEY_USER_COUNSEL_SHOW_ZSET + uid).zrange(
KEY_USER_COUNSEL_SHOW_ZSET + uid, 0, -1);
if (showIdsCounsel != null && showIdsCounsel.size() > 0) {
for (String showId : showIdsCounsel) {
Set<Tuple> tuplesPraise = getOldRedis(KEY_SHOW_PRAISE_SET + showId).zrangeWithScores(
KEY_SHOW_PRAISE_SET + showId, 0, -1);
if (tuplesPraise != null && tuplesPraise.size() > 0) {
for (Tuple t : tuplesPraise) {
String praiseUserId = t.getElement();
double praiseTime = t.getScore();
cluster.zadd(KEY_SHOW_PRAISE_SET + showId, praiseTime, praiseUserId);
}
}
}
}
} catch (Exception e) {
System.out.println("程序出现异常!当前uid:" + uid + ",异常信息:" + e.getMessage());
}
}
}
static Map<Integer, String> oldRedisSlot2Host = new HashMap<Integer, String>();
static Map<Integer, Jedis> oldRedisMap = new HashMap<Integer, Jedis>();
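/** Loads the old cluster's slot->host mapping from SystemConf.confFileDir/oldRedisSlot2Host.txt (one "host:port:begin-end" entry per line) and opens a Jedis connection per host. */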
static {
String host = null;
Integer port = null;
try {
File file = new File(SystemConf.confFileDir + "/oldRedisSlot2Host.txt");
if (file.isFile()) {
BufferedReader br = new BufferedReader(new FileReader(file));
String data;
//format per line: 10.0.228.31:29006:5243-5897
while ((data = br.readLine()) != null) {
String[] info = data.split(":");
if (info.length == 3) {
host = info[0];
port = Integer.valueOf(info[1]);
Jedis jedis = new Jedis(host, port);
jedis.info();//probe that the node is reachable
String[] soltInfo = info[2].split("-");
int begin = Integer.valueOf(soltInfo[0]);
int end = Integer.valueOf(soltInfo[1]);
for (int i = begin; i <= end; i++) {
oldRedisMap.put(i, jedis);
}
}
}
br.close();
}
} catch (Exception e) {
e.printStackTrace();
System.out.println("nodeError host:" + host + " port:" + port);
}
}
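/**
 * Restores user hashes ("u_" + uid) after a migration: numeric counters (copper, gold, experience,
 * praise count) from the old cluster are added onto the current values, while vip and recommend_desc
 * are only copied over when the new cluster holds no meaningful value.
 */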
private void restoreUserHash(String uids) {
String[] uidsArray = uids.split(",");
for (String uid : uidsArray) {
if (uid.length() == 0) {//invalid uid
continue;
}
String key = "u_" + uid;
int slot = JedisClusterCRC16.getCRC16(key) % 16384;
Jedis oldJedis = oldRedisMap.get(slot);
Map<String, String> oldData = oldJedis.hgetAll(key);//data from the old cluster
if (oldData != null) {
Map<String, String> newData = cluster.hgetAll(key);//current data in the new cluster
restoreUserHashSetData(key, "copper", oldData, newData);//restore copper coins
restoreUserHashSetData(key, "gold", oldData, newData);//restore gold coins
restoreUserHashSetData(key, "live_empirical_value", oldData, newData);//restore live-stream experience points
restoreUserHashSetData(key, "praise_count", oldData, newData);//restore praise count
//restore vip
String oldVip = oldData.get("vip");
String newVip = newData.get("vip");
if (null == newVip || "0".equals(newVip)) {//restore vip only when the new cluster has no meaningful value (the original "null != newVip" check looked inverted)
if (oldVip != null && !"".equals(oldVip) && !"0".equals(oldVip)) {
Map<String, String> userHash = new HashMap<String, String>();
userHash.put("vip", oldVip);
cluster.hmset(key, userHash);
}
}
//restore verification (recommend) info
String oldRecommend_desc = oldData.get("recommend_desc");
String newRecommend_desc = newData.get("recommend_desc");
if (null == newRecommend_desc || newRecommend_desc.length() == 0) {
if (oldRecommend_desc != null && oldRecommend_desc.length() > 0) {
Map<String, String> userHash = new HashMap<String, String>();
userHash.put("recommend_desc", oldRecommend_desc);
cluster.hmset(key, userHash);
}
}
}
}
}
/**
 * Merge old and new values into the final one, mainly for copper coins, gold coins, experience points and praise count (old and new are summed)
*/
private void restoreUserHashSetData(String key, String propertyName, Map<String, String> oldData,
Map<String, String> newData) {
String propertyOldData = oldData.get(propertyName);
if (propertyOldData != null && !"".equals(propertyOldData)) {
String properNewData = newData.get(propertyName);
long total = Long.valueOf(propertyOldData);
if (properNewData != null && !"".equals(properNewData)) {
total += Long.valueOf(properNewData);
}
Map<String, String> userHash = new HashMap<String, String>();
userHash.put(propertyName, total + "");
cluster.hmset(key, userHash);
}
}
}
| Add export error log
| redis-cluster-manager/src/main/java/com/huit/util/RedisClusterManager.java | Add export error log |
|
Java | apache-2.0 | 018e57d80478085b190c5a257ff7172b869cfa57 | 0 | asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,da1z/intellij-community,signed/intellij-community,semonte/intellij-community,signed/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,semonte/intellij-community,xfournet/intellij-community,signed/intellij-community,allotria/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,asedunov/intellij-community,FHannes/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,da1z/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,semonte/intellij-community,da1z/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,apixandru/intellij-community,semonte/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,FHannes/intellij-community,apixandru/intellij-community,da1z/intellij-community,suncycheng/intellij-community,semonte/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,FHannes/intellij-community,da1z/intellij-community,da1z/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,xfournet/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,signed/intellij-community,allotria/intellij-community,da1z/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,asedunov/intellij-community,FHannes/intellij-community,FHannes/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,signed/intellij-community,xfournet/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,signed/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,ibinti/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,signed/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,allotria/intellij-community,apixandru/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,da1z/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,da1z/intellij-community,vvv1559/intellij-community,semonte/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,xfournet/intellij-community,da1z/intellij-community,allotria
/intellij-community,suncycheng/intellij-community,semonte/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,da1z/intellij-community,allotria/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,signed/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,semonte/intellij-community,vvv1559/intellij-community,signed/intellij-community,ibinti/intellij-community,da1z/intellij-community,allotria/intellij-community,ibinti/intellij-community,asedunov/intellij-community,apixandru/intellij-community,xfournet/intellij-community,asedunov/intellij-community,xfournet/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,semonte/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,signed/intellij-community,ibinti/intellij-community,allotria/intellij-community,apixandru/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,allotria/intellij-community,allotria/intellij-community,vvv1559/intellij-community,allotria/intellij-community,signed/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,signed/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.lang.regexp.intention;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CustomShortcutSet;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.application.TransactionId;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.fileTypes.PlainTextFileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.EditorTextField;
import com.intellij.ui.JBColor;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.Alarm;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.intellij.lang.regexp.RegExpLanguage;
import org.intellij.lang.regexp.RegExpModifierProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.util.regex.Pattern;
/**
* @author Konstantin Bulenkov
*/
public class CheckRegExpForm {
private static final String LAST_EDITED_REGEXP = "last.edited.regexp";
private static final JBColor BACKGROUND_COLOR_MATCH = new JBColor(0xe7fadb, 0x445542);
private static final JBColor BACKGROUND_COLOR_NOMATCH = new JBColor(0xffb1a0, 0x6e2b28);
private final PsiFile myRegexpFile;
private EditorTextField mySampleText; //TODO[kb]: make it multiline
private EditorTextField myRegExp;
private JPanel myRootPanel;
private JBLabel myMessage;
private Project myProject;
public CheckRegExpForm(@NotNull PsiFile regexpFile) {
myRegexpFile = regexpFile;
}
private void createUIComponents() {
myProject = myRegexpFile.getProject();
Document document = PsiDocumentManager.getInstance(myProject).getDocument(myRegexpFile);
myRegExp = new EditorTextField(document, myProject, RegExpLanguage.INSTANCE.getAssociatedFileType());
final String sampleText = PropertiesComponent.getInstance(myProject).getValue(LAST_EDITED_REGEXP, "Sample Text");
mySampleText = new EditorTextField(sampleText, myProject, PlainTextFileType.INSTANCE) {
@Override
protected void updateBorder(@NotNull EditorEx editor) {
setupBorder(editor);
}
};
mySampleText.setOneLineMode(false);
int preferredWidth = Math.max(JBUI.scale(250), myRegExp.getPreferredSize().width);
myRegExp.setPreferredWidth(preferredWidth);
mySampleText.setPreferredWidth(preferredWidth);
myRootPanel = new JPanel(new BorderLayout()) {
Disposable disposable;
@Override
public void addNotify() {
super.addNotify();
disposable = Disposer.newDisposable();
IdeFocusManager.getGlobalInstance().requestFocus(mySampleText, true);
new AnAction(){
@Override
public void actionPerformed(AnActionEvent e) {
IdeFocusManager.findInstance().requestFocus(myRegExp.getFocusTarget(), true);
}
}.registerCustomShortcutSet(CustomShortcutSet.fromString("shift TAB"), mySampleText);
final Alarm updater = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, disposable);
Runnable scheduler = () -> {
TransactionId transactionId = TransactionGuard.getInstance().getContextTransaction();
updater.cancelAllRequests();
if (!updater.isDisposed()) {
updater.addRequest(() -> {
Boolean correct = isMatchingText(myRegexpFile, mySampleText.getText());
TransactionGuard.getInstance().submitTransaction(myProject, transactionId, () -> setBalloonState(correct));
}, 200);
}
};
DocumentAdapter documentListener = new DocumentAdapter() {
@Override
public void documentChanged(DocumentEvent e) {
scheduler.run();
}
};
myRegExp.addDocumentListener(documentListener);
mySampleText.addDocumentListener(documentListener);
scheduler.run();
mySampleText.selectAll();
}
@Override
public void removeNotify() {
super.removeNotify();
Disposer.dispose(disposable);
PropertiesComponent.getInstance(myProject).setValue(LAST_EDITED_REGEXP, mySampleText.getText());
}
};
myRootPanel.setBorder(JBUI.Borders.empty(UIUtil.DEFAULT_VGAP, UIUtil.DEFAULT_HGAP));
}
private void setBalloonState(Boolean correct) {
mySampleText.setBackground(correct != null && correct ? BACKGROUND_COLOR_MATCH : BACKGROUND_COLOR_NOMATCH);
myMessage.setText(correct == null ? "Pattern is too complex" : correct ? "Matches!" : "No match");
myRootPanel.revalidate();
Balloon balloon = JBPopupFactory.getInstance().getParentBalloonFor(myRootPanel);
if (balloon != null && !balloon.isDisposed()) balloon.revalidate();
}
@NotNull
public JComponent getPreferredFocusedComponent() {
return mySampleText;
}
@NotNull
public JPanel getRootPanel() {
return myRootPanel;
}
@TestOnly
public static boolean isMatchingTextTest(@NotNull PsiFile regexpFile, @NotNull String sampleText) {
Boolean result = isMatchingText(regexpFile, sampleText);
return result != null && result;
}
private static Boolean isMatchingText(@NotNull final PsiFile regexpFile, @NotNull String sampleText) {
final String regExp = regexpFile.getText();
Integer patternFlags = ApplicationManager.getApplication().runReadAction((Computable<Integer>)() -> {
PsiLanguageInjectionHost host = InjectedLanguageUtil.findInjectionHost(regexpFile);
int flags = 0;
if (host != null) {
for (RegExpModifierProvider provider : RegExpModifierProvider.EP.allForLanguage(host.getLanguage())) {
flags = provider.getFlags(host, regexpFile);
if (flags > 0) break;
}
}
return flags;
});
try {
//noinspection MagicConstant
return Pattern.compile(regExp, patternFlags).matcher(StringUtil.newBombedCharSequence(sampleText, 1000)).matches();
} catch (ProcessCanceledException pc) {
return null;
}
catch (Exception ignore) {}
return false;
}
}
| RegExpSupport/src/org/intellij/lang/regexp/intention/CheckRegExpForm.java | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.lang.regexp.intention;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CustomShortcutSet;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.fileTypes.PlainTextFileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.ui.EditorTextField;
import com.intellij.ui.JBColor;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.Alarm;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.intellij.lang.regexp.RegExpLanguage;
import org.intellij.lang.regexp.RegExpModifierProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.util.regex.Pattern;
/**
* @author Konstantin Bulenkov
*/
public class CheckRegExpForm {
private static final String LAST_EDITED_REGEXP = "last.edited.regexp";
private static final JBColor BACKGROUND_COLOR_MATCH = new JBColor(0xe7fadb, 0x445542);
private static final JBColor BACKGROUND_COLOR_NOMATCH = new JBColor(0xffb1a0, 0x6e2b28);
private final PsiFile myRegexpFile;
private EditorTextField mySampleText; //TODO[kb]: make it multiline
private EditorTextField myRegExp;
private JPanel myRootPanel;
private JBLabel myMessage;
private Project myProject;
public CheckRegExpForm(@NotNull PsiFile regexpFile) {
myRegexpFile = regexpFile;
}
private void createUIComponents() {
myProject = myRegexpFile.getProject();
Document document = PsiDocumentManager.getInstance(myProject).getDocument(myRegexpFile);
myRegExp = new EditorTextField(document, myProject, RegExpLanguage.INSTANCE.getAssociatedFileType());
final String sampleText = PropertiesComponent.getInstance(myProject).getValue(LAST_EDITED_REGEXP, "Sample Text");
mySampleText = new EditorTextField(sampleText, myProject, PlainTextFileType.INSTANCE) {
@Override
protected void updateBorder(@NotNull EditorEx editor) {
setupBorder(editor);
}
};
mySampleText.setOneLineMode(false);
int preferredWidth = Math.max(JBUI.scale(250), myRegExp.getPreferredSize().width);
myRegExp.setPreferredWidth(preferredWidth);
mySampleText.setPreferredWidth(preferredWidth);
myRootPanel = new JPanel(new BorderLayout()) {
Disposable disposable;
@Override
public void addNotify() {
super.addNotify();
disposable = Disposer.newDisposable();
IdeFocusManager.getGlobalInstance().requestFocus(mySampleText, true);
new AnAction(){
@Override
public void actionPerformed(AnActionEvent e) {
IdeFocusManager.findInstance().requestFocus(myRegExp.getFocusTarget(), true);
}
}.registerCustomShortcutSet(CustomShortcutSet.fromString("shift TAB"), mySampleText);
final Alarm updater = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, disposable);
DocumentAdapter documentListener = new DocumentAdapter() {
@Override
public void documentChanged(DocumentEvent e) {
updater.cancelAllRequests();
if (!updater.isDisposed()) {
updater.addRequest(CheckRegExpForm.this::updateBalloon, 200);
}
}
};
myRegExp.addDocumentListener(documentListener);
mySampleText.addDocumentListener(documentListener);
updateBalloon();
mySampleText.selectAll();
}
@Override
public void removeNotify() {
super.removeNotify();
Disposer.dispose(disposable);
PropertiesComponent.getInstance(myProject).setValue(LAST_EDITED_REGEXP, mySampleText.getText());
}
};
myRootPanel.setBorder(JBUI.Borders.empty(UIUtil.DEFAULT_VGAP, UIUtil.DEFAULT_HGAP));
}
@NotNull
public JComponent getPreferredFocusedComponent() {
return mySampleText;
}
@NotNull
public JPanel getRootPanel() {
return myRootPanel;
}
private void updateBalloon() {
final Boolean correct = isMatchingText(myRegexpFile, mySampleText.getText());
ApplicationManager.getApplication().invokeLater(() -> {
mySampleText.setBackground(correct != null && correct ? BACKGROUND_COLOR_MATCH : BACKGROUND_COLOR_NOMATCH);
myMessage.setText(correct == null ? "Pattern is too complex" : correct ? "Matches!" : "No match");
myRootPanel.revalidate();
Balloon balloon = JBPopupFactory.getInstance().getParentBalloonFor(myRootPanel);
if (balloon != null && !balloon.isDisposed()) balloon.revalidate();
});
}
@TestOnly
public static boolean isMatchingTextTest(@NotNull PsiFile regexpFile, @NotNull String sampleText) {
Boolean result = isMatchingText(regexpFile, sampleText);
return result != null && result;
}
private static Boolean isMatchingText(@NotNull final PsiFile regexpFile, @NotNull String sampleText) {
final String regExp = regexpFile.getText();
Integer patternFlags = ApplicationManager.getApplication().runReadAction(new Computable<Integer>() {
@Override
public Integer compute() {
PsiLanguageInjectionHost host = InjectedLanguageUtil.findInjectionHost(regexpFile);
int flags = 0;
if (host != null) {
for (RegExpModifierProvider provider : RegExpModifierProvider.EP.allForLanguage(host.getLanguage())) {
flags = provider.getFlags(host, regexpFile);
if (flags > 0) break;
}
}
return flags;
}
});
try {
//noinspection MagicConstant
return Pattern.compile(regExp, patternFlags).matcher(StringUtil.newBombedCharSequence(sampleText, 1000)).matches();
} catch (ProcessCanceledException pc) {
return null;
}
catch (Exception ignore) {}
return false;
}
}
| RegexpChecker: continue transaction IDEA-165426
| RegExpSupport/src/org/intellij/lang/regexp/intention/CheckRegExpForm.java | RegexpChecker: continue transaction IDEA-165426 |
|
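The form in the record above guards live regex matching with a time-limited ("bombed") character sequence so a catastrophic pattern cannot freeze the IDE, reporting a timeout as "Pattern is too complex". Below is a minimal, editor-added sketch of the same guard using only java.util.regex; the class and method names are illustrative and are not part of the IntelliJ platform API.

import java.util.regex.Pattern;

final class BoundedRegexCheck {

    /** A CharSequence whose charAt() aborts once a wall-clock budget is spent. */
    private static final class DeadlineCharSequence implements CharSequence {
        private final CharSequence delegate;
        private final long deadlineNanos;

        DeadlineCharSequence(CharSequence delegate, long budgetMillis) {
            this.delegate = delegate;
            this.deadlineNanos = System.nanoTime() + budgetMillis * 1_000_000L;
        }

        @Override public int length() {
            return delegate.length();
        }

        @Override public char charAt(int index) {
            if (System.nanoTime() > deadlineNanos) {
                throw new IllegalStateException("regex time budget exceeded");
            }
            return delegate.charAt(index);
        }

        @Override public CharSequence subSequence(int start, int end) {
            // Simplified: the sub-sequence is not re-bombed; Matcher mainly uses charAt()/length().
            return delegate.subSequence(start, end);
        }
    }

    /** Returns TRUE/FALSE for a match, or null when matching ran out of time ("too complex"). */
    static Boolean isMatching(String regExp, String sampleText, long budgetMillis) {
        try {
            return Pattern.compile(regExp)
                    .matcher(new DeadlineCharSequence(sampleText, budgetMillis))
                    .matches();
        } catch (IllegalStateException timedOut) {
            return null;              // mirrors the "Pattern is too complex" branch above
        } catch (Exception badPatternOrOther) {
            return Boolean.FALSE;     // e.g. an invalid pattern simply reports "No match"
        }
    }
}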
Java | apache-2.0 | 550501c280a59cfc9d9824cf496dc3e25cc77f0a | 0 | apache/directory-project | /*
* Copyright 2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ldap.common.codec;
/**
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
*/
public class LdapConstants {
/** The scope constants */
public static final int SCOPE_BASE_OBJECT = 0;
public static final int SCOPE_SINGLE_LEVEL = 1;
public static final int SCOPE_WHOLE_SUBTREE = 2;
/** The DerefAlias constants */
public static final int NEVER_DEREF_ALIASES = 0;
public static final int DEREF_IN_SEARCHING = 1;
public static final int DEREF_FINDING_BASE_OBJ = 2;
public static final int DEREF_ALWAYS = 3;
/** The operations */
public static final int OPERATION_ADD = 0;
public static final int OPERATION_DELETE = 1;
public static final int OPERATION_REPLACE = 2;
/** The filters */
public static final int EQUALITY_MATCH_FILTER = 0;
public static final int GREATER_OR_EQUAL_FILTER = 1;
public static final int LESS_OR_EQUAL_FILTER = 2;
public static final int APPROX_MATCH_FILTER = 3;
/** LDAP contextual tags */
public static final byte UNBIND_REQUEST_TAG = 0x42;
public static final byte DEL_REQUEST_TAG = 0x4A;
public static final byte ABANDON_REQUEST_TAG = 0x50;
public static final byte BIND_REQUEST_TAG = 0x60;
public static final byte BIND_RESPONSE_TAG = 0x61;
public static final byte SEARCH_REQUEST_TAG = 0x63;
public static final byte SEARCH_RESULT_ENTRY_TAG = 0x64;
public static final byte SEARCH_RESULT_DONE_TAG = 0x65;
public static final byte MODIFY_REQUEST_TAG = 0x66;
public static final byte MODIFY_RESPONSE_TAG = 0x67;
public static final byte ADD_REQUEST_TAG = 0x68;
public static final byte ADD_RESPONSE_TAG = 0x69;
public static final byte DEL_RESPONSE_TAG = 0x6B;
public static final byte MODIFY_DN_REQUEST_TAG = 0x6C;
public static final byte MODIFY_DN_RESPONSE_TAG = 0x6D;
public static final byte COMPARE_REQUEST_TAG = 0x6E;
public static final byte COMPARE_RESPONSE_TAG = 0x6F;
public static final byte SEARCH_RESULT_REFERENCE_TAG = 0x73;
public static final byte EXTENDED_REQUEST_TAG = 0x77;
public static final byte EXTENDED_RESPONSE_TAG = 0x78;
// The following tags are ints, because bytes above 127 are negative
// numbers, and we can't use them as array indexes.
public static final int BIND_REQUEST_SIMPLE_TAG = 0x80;
public static final int EXTENDED_REQUEST_NAME_TAG = 0x80;
public static final int MODIFY_DN_REQUEST_NEW_SUPERIOR_TAG = 0x80;
public static final int SEARCH_SUBSTRINGS_INITIAL_TAG = 0x80;
public static final int EXTENDED_REQUEST_VALUE_TAG = 0x81;
public static final int SEARCH_MATCHING_RULE_TAG = 0x81;
public static final int SEARCH_SUBSTRINGS_ANY_TAG = 0x81;
public static final int SEARCH_TYPE_TAG = 0x82;
public static final int SEARCH_SUBSTRINGS_FINAL_TAG = 0x82;
public static final int SEARCH_MATCH_VALUE_TAG = 0x83;
public static final int DN_ATTRIBUTES_FILTER_TAG = 0x84;
public static final int SERVER_SASL_CREDENTIAL_TAG = 0x87;
public static final int PRESENT_FILTER_TAG = 0x87;
public static final int EXTENDED_RESPONSE_RESPONSE_NAME_TAG = 0x8A;
public static final int EXTENDED_RESPONSE_RESPONSE_TAG = 0x8B;
public static final int CONTROLS_TAG = 0xA0;
public static final int AND_FILTER_TAG = 0xA0;
public static final int OR_FILTER_TAG = 0xA1;
public static final int NOT_FILTER_TAG = 0xA2;
public static final int BIND_REQUEST_SASL_TAG = 0xA3;
public static final int LDAP_RESULT_REFERRAL_SEQUENCE_TAG = 0xA3;
public static final int EQUALITY_MATCH_FILTER_TAG = 0xA3;
public static final int SUBSTRINGS_FILTER_TAG = 0xA4;
public static final int GREATER_OR_EQUAL_FILTER_TAG = 0xA5;
public static final int LESS_OR_EQUAL_FILTER_TAG = 0xA6;
public static final int APPROX_MATCH_FILTER_TAG = 0xA8;
public static final int EXTENSIBLE_MATCH_FILTER_TAG = 0xA9;
// The messages types
public static final int ABANDON_REQUEST = 0;
public static final int ADD_REQUEST = 1;
public static final int ADD_RESPONSE = 2;
public static final int BIND_REQUEST = 3;
public static final int BIND_RESPONSE = 4;
public static final int COMPARE_REQUEST = 5;
public static final int COMPARE_RESPONSE = 6;
public static final int DEL_REQUEST = 7;
public static final int DEL_RESPONSE = 8;
public static final int EXTENDED_REQUEST = 9;
public static final int EXTENDED_RESPONSE = 10;
public static final int MODIFYDN_REQUEST = 11;
public static final int MODIFYDN_RESPONSE = 12;
public static final int MODIFY_REQUEST = 13;
public static final int MODIFY_RESPONSE = 14;
public static final int SEARCH_REQUEST = 15;
public static final int SEARCH_RESULT_DONE = 16;
public static final int SEARCH_RESULT_ENTRY = 17;
public static final int SEARCH_RESULT_REFERENCE = 18;
public static final int UNBIND_REQUEST = 19;
public static final int UNKNOWN = -1; // Just in case ...
}
| common/ldap/src/main/java/org/apache/ldap/common/codec/LdapConstants.java | /*
* Copyright 2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ldap.common.codec;
/**
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
*/
public class LdapConstants {
/** The scope constants */
public static final int SCOPE_BASE_OBJECT = 0;
public static final int SCOPE_SINGLE_LEVEL = 1;
public static final int SCOPE_WHOLE_SUBTREE = 2;
/** The DerefAlias constants */
public static final int NEVER_DEREF_ALIASES = 0;
public static final int DEREF_IN_SEARCHING = 1;
public static final int DEREF_FINDING_BASE_OBJ = 2;
public static final int DEREF_ALWAYS = 3;
/** The operations */
public static final int OPERATION_ADD = 0;
public static final int OPERATION_DELETE = 1;
public static final int OPERATION_REPLACE = 2;
/** The filters */
public static final int EQUALITY_MATCH_FILTER = 0;
public static final int GREATER_OR_EQUAL_FILTER = 1;
public static final int LESS_OR_EQUAL_FILTER = 2;
public static final int APPROX_MATCH_FILTER = 3;
/** LDAP contextual tags */
public static final byte UNBIND_REQUEST_TAG = 0x42;
public static final byte DEL_REQUEST_TAG = 0x4A;
public static final byte ABANDON_REQUEST_TAG = 0x50;
public static final byte BIND_REQUEST_TAG = 0x60;
public static final byte BIND_RESPONSE_TAG = 0x61;
public static final byte SEARCH_REQUEST_TAG = 0x63;
public static final byte SEARCH_RESULT_ENTRY_TAG = 0x64;
public static final byte SEARCH_RESULT_DONE_TAG = 0x65;
public static final byte MODIFY_REQUEST_TAG = 0x66;
public static final byte MODIFY_RESPONSE_TAG = 0x67;
public static final byte ADD_REQUEST_TAG = 0x68;
public static final byte ADD_RESPONSE_TAG = 0x69;
public static final byte DEL_RESPONSE_TAG = 0x6B;
public static final byte MODIFY_DN_REQUEST_TAG = 0x6C;
public static final byte MODIFY_DN_RESPONSE_TAG = 0x6D;
public static final byte COMPARE_REQUEST_TAG = 0x6E;
public static final byte COMPARE_RESPONSE_TAG = 0x6F;
public static final byte SEARCH_RESULT_REFERENCE_TAG = 0x73;
public static final byte EXTENDED_REQUEST_TAG = 0x77;
public static final byte EXTENDED_RESPONSE_TAG = 0x78;
// The following tags are ints, because bytes above 127 are negative
// numbers, and we can't use them as array indexes.
public static final int BIND_REQUEST_SIMPLE_TAG = 0x80;
public static final int EXTENDED_REQUEST_NAME_TAG = 0x80;
public static final int MODIFY_DN_REQUEST_NEW_SUPERIOR_TAG = 0x80;
public static final int SEARCH_SUBSTRINGS_INITIAL_TAG = 0x80;
public static final int EXTENDED_REQUEST_VALUE_TAG = 0x81;
public static final int SEARCH_MATCHING_RULE_TAG = 0x81;
public static final int SEARCH_SUBSTRINGS_ANY_TAG = 0x81;
public static final int SEARCH_TYPE_TAG = 0x82;
public static final int SEARCH_SUBSTRINGS_FINAL_TAG = 0x82;
public static final int SEARCH_MATCH_VALUE_TAG = 0x83;
public static final int BIND_REQUEST_SASL_TAG = 0x83;
public static final int LDAP_RESULT_REFERRAL_SEQUENCE_TAG = 0xa3;
public static final int DN_ATTRIBUTES_FILTER_TAG = 0x84;
public static final int SERVER_SASL_CREDENTIAL_TAG = 0x87;
public static final int PRESENT_FILTER_TAG = 0x87;
public static final int EXTENDED_RESPONSE_RESPONSE_NAME_TAG = 0x8A;
public static final int EXTENDED_RESPONSE_RESPONSE_TAG = 0x8B;
public static final int CONTROLS_TAG = 0xA0;
public static final int AND_FILTER_TAG = 0xA0;
public static final int OR_FILTER_TAG = 0xA1;
public static final int NOT_FILTER_TAG = 0xA2;
public static final int EQUALITY_MATCH_FILTER_TAG = 0xA3;
public static final int SUBSTRINGS_FILTER_TAG = 0xA4;
public static final int GREATER_OR_EQUAL_FILTER_TAG = 0xA5;
public static final int LESS_OR_EQUAL_FILTER_TAG = 0xA6;
public static final int APPROX_MATCH_FILTER_TAG = 0xA8;
public static final int EXTENSIBLE_MATCH_FILTER_TAG = 0xA9;
// The messages types
public static final int ABANDON_REQUEST = 0;
public static final int ADD_REQUEST = 1;
public static final int ADD_RESPONSE = 2;
public static final int BIND_REQUEST = 3;
public static final int BIND_RESPONSE = 4;
public static final int COMPARE_REQUEST = 5;
public static final int COMPARE_RESPONSE = 6;
public static final int DEL_REQUEST = 7;
public static final int DEL_RESPONSE = 8;
public static final int EXTENDED_REQUEST = 9;
public static final int EXTENDED_RESPONSE = 10;
public static final int MODIFYDN_REQUEST = 11;
public static final int MODIFYDN_RESPONSE = 12;
public static final int MODIFY_REQUEST = 13;
public static final int MODIFY_RESPONSE = 14;
public static final int SEARCH_REQUEST = 15;
public static final int SEARCH_RESULT_DONE = 16;
public static final int SEARCH_RESULT_ENTRY = 17;
public static final int SEARCH_RESULT_REFERENCE = 18;
public static final int UNBIND_REQUEST = 19;
public static final int UNKNOWN = -1; // Just in case ...
}
| Fixed a bad transition number for BindRequest : Sasl is 0xA3
git-svn-id: 5c3b06693d750a6aefbf1081b6b7d57c0165fdb2@369743 13f79535-47bb-0310-9956-ffa450edef68
| common/ldap/src/main/java/org/apache/ldap/common/codec/LdapConstants.java | Fixed a bad transition number for BindRequest : Sasl is 0xA3 |
|
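For context on the fix recorded above: in the LDAP BindRequest, simple authentication is the primitive context tag [0] (0x80), while the SASL alternative is the constructed context tag [3] (0xA3), so a decoder transition keyed on the old value 0x83 would presumably never fire for a real SASL bind. The snippet below is an editor-added illustration of dispatching on those tag constants, not the project's actual grammar code; it assumes the LdapConstants class defined above is on the classpath.

import org.apache.ldap.common.codec.LdapConstants;

final class BindRequestAuthTag {

    /** Distinguishes the BindRequest authentication choice by its LDAP contextual tag. */
    static boolean isSaslAuthentication(int tag) {
        switch (tag) {
            case LdapConstants.BIND_REQUEST_SIMPLE_TAG:   // 0x80 - simple password bind
                return false;
            case LdapConstants.BIND_REQUEST_SASL_TAG:     // 0xA3 - SASL bind (value fixed by this commit)
                return true;
            default:
                throw new IllegalArgumentException(
                    "Unexpected BindRequest authentication tag: 0x" + Integer.toHexString(tag));
        }
    }
}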
Java | apache-2.0 | 937c0481e0a1bc173dd5ddf1371d38ce13fa10d8 | 0 | maliqq/netty,castomer/netty,ioanbsu/netty,nayato/netty,sverkera/netty,wuxiaowei907/netty,zzcclp/netty,tbrooks8/netty,Scottmitch/netty,andsel/netty,smayoorans/netty,cnoldtree/netty,Kingson4Wu/netty,smayoorans/netty,zhujingling/netty,SinaTadayon/netty,caoyanwei/netty,kiril-me/netty,timboudreau/netty,Techcable/netty,lugt/netty,carl-mastrangelo/netty,jovezhougang/netty,andsel/netty,liuciuse/netty,nkhuyu/netty,brennangaunce/netty,AchinthaReemal/netty,kjniemi/netty,shelsonjava/netty,nadeeshaan/netty,afredlyj/learn-netty,woshilaiceshide/netty,wuxiaowei907/netty,afds/netty,WangJunTYTL/netty,Squarespace/netty,blucas/netty,rovarga/netty,AchinthaReemal/netty,Alwayswithme/netty,KatsuraKKKK/netty,LuminateWireless/netty,sunbeansoft/netty,mx657649013/netty,shism/netty,andsel/netty,louiscryan/netty,andsel/netty,yawkat/netty,jchambers/netty,Alwayswithme/netty,Apache9/netty,tempbottle/netty,zxhfirefox/netty,MediumOne/netty,JungMinu/netty,youprofit/netty,lukw00/netty,yrcourage/netty,x1957/netty,bob329/netty,nkhuyu/netty,castomer/netty,orika/netty,codevelop/netty,fengshao0907/netty,Alwayswithme/netty,imangry/netty-zh,duqiao/netty,daschl/netty,huanyi0723/netty,Squarespace/netty,buchgr/netty,AnselQiao/netty,lugt/netty,chanakaudaya/netty,mubarak/netty,mubarak/netty,lugt/netty,ijuma/netty,tbrooks8/netty,sja/netty,shenguoquan/netty,tempbottle/netty,rovarga/netty,danny200309/netty,phlizik/netty,wangyikai/netty,seetharamireddy540/netty,orika/netty,windie/netty,clebertsuconic/netty,youprofit/netty,golovnin/netty,Kalvar/netty,idelpivnitskiy/netty,yipen9/netty,imangry/netty-zh,idelpivnitskiy/netty,tempbottle/netty,chanakaudaya/netty,slandelle/netty,AchinthaReemal/netty,altihou/netty,altihou/netty,Techcable/netty,ijuma/netty,Techcable/netty,gigold/netty,youprofit/netty,chinayin/netty,mcobrien/netty,bryce-anderson/netty,WangJunTYTL/netty,wuyinxian124/netty,afds/netty,clebertsuconic/netty,artgon/netty,exinguu/netty,mx657649013/netty,sja/netty,codevelop/netty,doom369/netty,ejona86/netty,chrisprobst/netty,fengshao0907/netty,ninja-/netty,qingsong-xu/netty,IBYoung/netty,wangyikai/netty,s-gheldd/netty,sameira/netty,eonezhang/netty,AnselQiao/netty,satishsaley/netty,johnou/netty,bigheary/netty,silvaran/netty,sverkera/netty,WangJunTYTL/netty,danbev/netty,mcobrien/netty,x1957/netty,normanmaurer/netty,zer0se7en/netty,gigold/netty,carlbai/netty,Mounika-Chirukuri/netty,seetharamireddy540/netty,sunbeansoft/netty,balaprasanna/netty,bob329/netty,lukw00/netty,cnoldtree/netty,idelpivnitskiy/netty,BrunoColin/netty,unei66/netty,skyao/netty,f7753/netty,huuthang1993/netty,IBYoung/netty,xiongzheng/netty,ejona86/netty,hgl888/netty,brennangaunce/netty,orika/netty,djchen/netty,kiril-me/netty,nayato/netty,AchinthaReemal/netty,dongjiaqiang/netty,andsel/netty,LuminateWireless/netty,mway08/netty,SinaTadayon/netty,shenguoquan/netty,DavidAlphaFox/netty,ichaki5748/netty,caoyanwei/netty,yonglehou/netty-1,DolphinZhao/netty,Spikhalskiy/netty,hgl888/netty,jenskordowski/netty,mikkokar/netty,seetharamireddy540/netty,olupotd/netty,f7753/netty,gerdriesselmann/netty,LuminateWireless/netty,purplefox/netty-4.0.2.8-hacked,purplefox/netty-4.0.2.8-hacked,lukehutch/netty,lukehutch/netty,nat2013/netty,ifesdjeen/netty,duqiao/netty,nadeeshaan/netty,blademainer/netty,zxhfirefox/netty,luyiisme/netty,smayoorans/netty,carlbai/netty,kyle-liu/netty4study,mcanthony/netty,niuxinghua/netty,zhujingling/netty,youprofit/netty,MediumOne/netty,johnou/netty,danbev/netty,unei66/netty,KatsuraKKKK/netty,huuthang1993/netty,m
way08/netty,moyiguket/netty,NiteshKant/netty,yipen9/netty,xingguang2013/netty,timboudreau/netty,slandelle/netty,shuangqiuan/netty,qingsong-xu/netty,cnoldtree/netty,yonglehou/netty-1,lugt/netty,johnou/netty,xiexingguang/netty,doom369/netty,mikkokar/netty,shism/netty,Kingson4Wu/netty,sameira/netty,daschl/netty,nmittler/netty,silvaran/netty,sammychen105/netty,lugt/netty,serioussam/netty,bigheary/netty,djchen/netty,lznhust/netty,balaprasanna/netty,xiongzheng/netty,huuthang1993/netty,normanmaurer/netty,Spikhalskiy/netty,clebertsuconic/netty,kvr000/netty,danny200309/netty,sverkera/netty,kiril-me/netty,slandelle/netty,AnselQiao/netty,Scottmitch/netty,castomer/netty,KatsuraKKKK/netty,shelsonjava/netty,zer0se7en/netty,jovezhougang/netty,chrisprobst/netty,buchgr/netty,qingsong-xu/netty,shuangqiuan/netty,yonglehou/netty-1,BrunoColin/netty,ioanbsu/netty,chinayin/netty,alkemist/netty,niuxinghua/netty,skyao/netty,balaprasanna/netty,jongyeol/netty,ngocdaothanh/netty,danbev/netty,drowning/netty,nayato/netty,bryce-anderson/netty,xiexingguang/netty,niuxinghua/netty,bob329/netty,carl-mastrangelo/netty,shelsonjava/netty,caoyanwei/netty,netty/netty,windie/netty,artgon/netty,JungMinu/netty,fengshao0907/netty,jdivy/netty,doom369/netty,balaprasanna/netty,firebase/netty,eincs/netty,satishsaley/netty,nkhuyu/netty,carlbai/netty,skyao/netty,KatsuraKKKK/netty,dongjiaqiang/netty,danbev/netty,mx657649013/netty,gigold/netty,serioussam/netty,eincs/netty,Mounika-Chirukuri/netty,Apache9/netty,mikkokar/netty,s-gheldd/netty,luyiisme/netty,olupotd/netty,mx657649013/netty,ioanbsu/netty,chanakaudaya/netty,alkemist/netty,wangyikai/netty,ngocdaothanh/netty,bryce-anderson/netty,gerdriesselmann/netty,Mounika-Chirukuri/netty,liyang1025/netty,ajaysarda/netty,maliqq/netty,SinaTadayon/netty,wangyikai/netty,zxhfirefox/netty,djchen/netty,chinayin/netty,Apache9/netty,huanyi0723/netty,hyangtack/netty,afds/netty,yawkat/netty,netty/netty,phlizik/netty,brennangaunce/netty,mway08/netty,mcanthony/netty,jchambers/netty,louiscryan/netty,Squarespace/netty,ejona86/netty,carl-mastrangelo/netty,DavidAlphaFox/netty,mcobrien/netty,yipen9/netty,tbrooks8/netty,liuciuse/netty,x1957/netty,niuxinghua/netty,joansmith/netty,fantayeneh/netty,drowning/netty,shelsonjava/netty,zer0se7en/netty,buchgr/netty,chrisprobst/netty,kjniemi/netty,shuangqiuan/netty,afredlyj/learn-netty,Spikhalskiy/netty,normanmaurer/netty,silvaran/netty,liyang1025/netty,carl-mastrangelo/netty,hyangtack/netty,huuthang1993/netty,blademainer/netty,purplefox/netty-4.0.2.8-hacked,fenik17/netty,jchambers/netty,jdivy/netty,huanyi0723/netty,satishsaley/netty,wuxiaowei907/netty,olupotd/netty,Kalvar/netty,louiscryan/netty,jongyeol/netty,ijuma/netty,maliqq/netty,lukw00/netty,lukw00/netty,WangJunTYTL/netty,fantayeneh/netty,johnou/netty,x1957/netty,huanyi0723/netty,WangJunTYTL/netty,zzcclp/netty,wuxiaowei907/netty,fenik17/netty,kjniemi/netty,zhoffice/netty,Kingson4Wu/netty,carlbai/netty,drowning/netty,yrcourage/netty,yonglehou/netty-1,dongjiaqiang/netty,hgl888/netty,hyangtack/netty,NiteshKant/netty,mosoft521/netty,louxiu/netty,sunbeansoft/netty,xiexingguang/netty,AnselQiao/netty,Kalvar/netty,sverkera/netty,ninja-/netty,mcanthony/netty,ajaysarda/netty,drowning/netty,Squarespace/netty,sja/netty,blademainer/netty,gigold/netty,johnou/netty,Scottmitch/netty,rovarga/netty,zhoffice/netty,exinguu/netty,MediumOne/netty,fengjiachun/netty,eincs/netty,kiril-me/netty,tbrooks8/netty,luyiisme/netty,mosoft521/netty,jchambers/netty,mubarak/netty,lukehutch/netty,liyang1025/netty,chinayin/netty,cnoldtree/netty,dongjiaqiang/
netty,AchinthaReemal/netty,Spikhalskiy/netty,lightsocks/netty,chrisprobst/netty,lukehutch/netty,yipen9/netty,pengzj/netty,fantayeneh/netty,junjiemars/netty,xingguang2013/netty,blucas/netty,skyao/netty,junjiemars/netty,imangry/netty-zh,xiexingguang/netty,BrunoColin/netty,shenguoquan/netty,woshilaiceshide/netty,yrcourage/netty,cnoldtree/netty,djchen/netty,Kingson4Wu/netty,mubarak/netty,clebertsuconic/netty,seetharamireddy540/netty,louxiu/netty,jchambers/netty,zhoffice/netty,Techcable/netty,golovnin/netty,mx657649013/netty,alkemist/netty,Apache9/netty,Alwayswithme/netty,bigheary/netty,golovnin/netty,unei66/netty,firebase/netty,caoyanwei/netty,zhoffice/netty,moyiguket/netty,lznhust/netty,liuciuse/netty,bob329/netty,bob329/netty,jdivy/netty,mosoft521/netty,zhujingling/netty,fantayeneh/netty,jenskordowski/netty,mway08/netty,s-gheldd/netty,danbev/netty,Techcable/netty,olupotd/netty,seetharamireddy540/netty,hepin1989/netty,yrcourage/netty,mosoft521/netty,moyiguket/netty,timboudreau/netty,slandelle/netty,nayato/netty,NiteshKant/netty,lukehutch/netty,s-gheldd/netty,duqiao/netty,DavidAlphaFox/netty,orika/netty,ninja-/netty,artgon/netty,liyang1025/netty,CodingFabian/netty,yonglehou/netty-1,louxiu/netty,woshilaiceshide/netty,tbrooks8/netty,zhoffice/netty,artgon/netty,nat2013/netty,junjiemars/netty,ejona86/netty,dongjiaqiang/netty,xingguang2013/netty,Apache9/netty,jenskordowski/netty,mcobrien/netty,SinaTadayon/netty,buchgr/netty,ioanbsu/netty,blucas/netty,ijuma/netty,ifesdjeen/netty,huuthang1993/netty,ichaki5748/netty,shism/netty,sammychen105/netty,LuminateWireless/netty,ioanbsu/netty,Kingson4Wu/netty,louxiu/netty,moyiguket/netty,liuciuse/netty,zer0se7en/netty,yawkat/netty,zer0se7en/netty,wangyikai/netty,louiscryan/netty,jdivy/netty,sameira/netty,shism/netty,balaprasanna/netty,zzcclp/netty,unei66/netty,lightsocks/netty,silvaran/netty,ichaki5748/netty,sameira/netty,bigheary/netty,xingguang2013/netty,Kalvar/netty,jongyeol/netty,codevelop/netty,nat2013/netty,CodingFabian/netty,ngocdaothanh/netty,BrunoColin/netty,eonezhang/netty,NiteshKant/netty,eonezhang/netty,lukw00/netty,eonezhang/netty,lznhust/netty,maliqq/netty,Mounika-Chirukuri/netty,doom369/netty,ngocdaothanh/netty,Mounika-Chirukuri/netty,zxhfirefox/netty,xiexingguang/netty,brennangaunce/netty,f7753/netty,gerdriesselmann/netty,niuxinghua/netty,kiril-me/netty,normanmaurer/netty,danny200309/netty,f7753/netty,shenguoquan/netty,imangry/netty-zh,yawkat/netty,Kalvar/netty,serioussam/netty,lightsocks/netty,lightsocks/netty,exinguu/netty,louxiu/netty,daschl/netty,AnselQiao/netty,shuangqiuan/netty,netty/netty,LuminateWireless/netty,olupotd/netty,liyang1025/netty,clebertsuconic/netty,zhujingling/netty,jovezhougang/netty,x1957/netty,blucas/netty,mosoft521/netty,bryce-anderson/netty,ninja-/netty,kvr000/netty,tempbottle/netty,yawkat/netty,skyao/netty,phlizik/netty,doom369/netty,netty/netty,nayato/netty,chanakaudaya/netty,luyiisme/netty,jovezhougang/netty,silvaran/netty,shuangqiuan/netty,mcobrien/netty,nadeeshaan/netty,junjiemars/netty,xingguang2013/netty,Scottmitch/netty,gerdriesselmann/netty,fengjiachun/netty,carl-mastrangelo/netty,purplefox/netty-4.0.2.8-hacked,netty/netty,fengjiachun/netty,unei66/netty,sameira/netty,eonezhang/netty,nmittler/netty,ngocdaothanh/netty,bryce-anderson/netty,nmittler/netty,windie/netty,orika/netty,firebase/netty,hgl888/netty,hepin1989/netty,fenik17/netty,DolphinZhao/netty,menacher/netty,shelsonjava/netty,CodingFabian/netty,shenguoquan/netty,normanmaurer/netty,golovnin/netty,jenskordowski/netty,satishsaley/netty,NiteshKant/netty,SinaT
adayon/netty,timboudreau/netty,imangry/netty-zh,ichaki5748/netty,hyangtack/netty,smayoorans/netty,pengzj/netty,danny200309/netty,golovnin/netty,jongyeol/netty,duqiao/netty,kyle-liu/netty4study,blademainer/netty,yrcourage/netty,MediumOne/netty,caoyanwei/netty,mikkokar/netty,kvr000/netty,bigheary/netty,jongyeol/netty,moyiguket/netty,huanyi0723/netty,sammychen105/netty,Squarespace/netty,luyiisme/netty,lznhust/netty,xiongzheng/netty,menacher/netty,lightsocks/netty,youprofit/netty,mcanthony/netty,fenik17/netty,ajaysarda/netty,sverkera/netty,IBYoung/netty,ajaysarda/netty,eincs/netty,wuxiaowei907/netty,nkhuyu/netty,joansmith/netty,DolphinZhao/netty,KatsuraKKKK/netty,IBYoung/netty,liuciuse/netty,develar/netty,idelpivnitskiy/netty,JungMinu/netty,IBYoung/netty,fengjiachun/netty,mway08/netty,jenskordowski/netty,CodingFabian/netty,louiscryan/netty,tempbottle/netty,chanakaudaya/netty,DolphinZhao/netty,windie/netty,nadeeshaan/netty,Spikhalskiy/netty,JungMinu/netty,lznhust/netty,ichaki5748/netty,junjiemars/netty,ejona86/netty,kjniemi/netty,mikkokar/netty,sja/netty,idelpivnitskiy/netty,chrisprobst/netty,sunbeansoft/netty,f7753/netty,wuyinxian124/netty,windie/netty,duqiao/netty,blucas/netty,artgon/netty,qingsong-xu/netty,firebase/netty,chinayin/netty,fantayeneh/netty,nkhuyu/netty,Scottmitch/netty,hepin1989/netty,eincs/netty,hepin1989/netty,wuyinxian124/netty,sunbeansoft/netty,afds/netty,blademainer/netty,danny200309/netty,codevelop/netty,djchen/netty,afds/netty,BrunoColin/netty,gigold/netty,DolphinZhao/netty,timboudreau/netty,brennangaunce/netty,CodingFabian/netty,alkemist/netty,exinguu/netty,castomer/netty,xiongzheng/netty,develar/netty,exinguu/netty,ajaysarda/netty,castomer/netty,jdivy/netty,kjniemi/netty,woshilaiceshide/netty,kvr000/netty,joansmith/netty,ninja-/netty,fengjiachun/netty,MediumOne/netty,pengzj/netty,xiongzheng/netty,s-gheldd/netty,woshilaiceshide/netty,zzcclp/netty,mcanthony/netty,carlbai/netty,zxhfirefox/netty,sja/netty,fenik17/netty,shism/netty,mubarak/netty,qingsong-xu/netty,smayoorans/netty,phlizik/netty,hgl888/netty,maliqq/netty,zzcclp/netty,joansmith/netty,wuyinxian124/netty,gerdriesselmann/netty,altihou/netty,DavidAlphaFox/netty,afredlyj/learn-netty,altihou/netty,joansmith/netty,Alwayswithme/netty,rovarga/netty,zhujingling/netty,serioussam/netty,pengzj/netty,serioussam/netty,alkemist/netty,kvr000/netty,jovezhougang/netty,nadeeshaan/netty,ijuma/netty,satishsaley/netty,altihou/netty | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.DecoderResult;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.TooLongFrameException;
import java.util.List;
/**
* Decodes {@link ByteBuf}s into {@link HttpMessage}s and
* {@link HttpChunk}s.
*
* <h3>Parameters that prevent excessive memory consumption</h3>
* <table border="1">
* <tr>
* <th>Name</th><th>Meaning</th>
* </tr>
* <tr>
* <td>{@code maxInitialLineLength}</td>
* <td>The maximum length of the initial line
* (e.g. {@code "GET / HTTP/1.0"} or {@code "HTTP/1.0 200 OK"})
* If the length of the initial line exceeds this value, a
* {@link TooLongFrameException} will be raised.</td>
* </tr>
* <tr>
* <td>{@code maxHeaderSize}</td>
* <td>The maximum length of all headers. If the sum of the length of each
* header exceeds this value, a {@link TooLongFrameException} will be raised.</td>
* </tr>
* <tr>
* <td>{@code maxChunkSize}</td>
* <td>The maximum length of the content or each chunk. If the content length
* (or the length of each chunk) exceeds this value, the content or chunk
* will be split into multiple {@link HttpChunk}s whose length is
* {@code maxChunkSize} at maximum.</td>
* </tr>
* </table>
*
* <h3>Chunked Content</h3>
*
* If the content of an HTTP message is greater than {@code maxChunkSize} or
* the transfer encoding of the HTTP message is 'chunked', this decoder
* generates one {@link HttpMessage} instance and its following
* {@link HttpChunk}s per single HTTP message to avoid excessive memory
* consumption. For example, the following HTTP message:
* <pre>
* GET / HTTP/1.1
* Transfer-Encoding: chunked
*
* 1a
* abcdefghijklmnopqrstuvwxyz
* 10
* 1234567890abcdef
* 0
* Content-MD5: ...
* <i>[blank line]</i>
* </pre>
* triggers {@link HttpRequestDecoder} to generate 4 objects:
* <ol>
* <li>An {@link HttpRequest} whose {@link HttpMessage#isChunked() chunked}
* property is {@code true},</li>
* <li>The first {@link HttpChunk} whose content is {@code 'abcdefghijklmnopqrstuvwxyz'},</li>
* <li>The second {@link HttpChunk} whose content is {@code '1234567890abcdef'}, and</li>
* <li>An {@link HttpChunkTrailer} which marks the end of the content.</li>
* </ol>
*
* If you prefer not to handle {@link HttpChunk}s by yourself for your
* convenience, insert {@link HttpChunkAggregator} after this decoder in the
* {@link ChannelPipeline}. However, please note that your server might not
* be as memory efficient as without the aggregator.
*
* <h3>Extensibility</h3>
*
* Please note that this decoder is designed to be extended to implement
* a protocol derived from HTTP, such as
* <a href="http://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol">RTSP</a> and
* <a href="http://en.wikipedia.org/wiki/Internet_Content_Adaptation_Protocol">ICAP</a>.
* To implement the decoder of such a derived protocol, extend this class and
* implement all abstract methods properly.
* @apiviz.landmark
*/
public abstract class HttpMessageDecoder extends ReplayingDecoder<Object, HttpMessageDecoder.State> {
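// Editor's note, not from the original source: concrete subclasses such as
// HttpRequestDecoder are typically installed together with HttpChunkAggregator
// when handlers want a single aggregated HttpMessage instead of HttpChunks, e.g.:
//
//   ChannelPipeline p = ...;
//   p.addLast("decoder", new HttpRequestDecoder(4096, 8192, 8192));
//   p.addLast("aggregator", new HttpChunkAggregator(1048576)); // max content length, here 1 MiB
//   p.addLast("handler", new MyHttpHandler());                 // hypothetical application handler
//
// The aggregator constructor argument is an assumption; check HttpChunkAggregator
// in this code base for the exact signature.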
private final int maxInitialLineLength;
private final int maxHeaderSize;
private final int maxChunkSize;
private HttpMessage message;
private ByteBuf content;
private long chunkSize;
private int headerSize;
private int contentRead;
/**
* The internal state of {@link HttpMessageDecoder}.
* <em>Internal use only</em>.
* @apiviz.exclude
*/
protected enum State {
SKIP_CONTROL_CHARS,
READ_INITIAL,
READ_HEADER,
READ_VARIABLE_LENGTH_CONTENT,
READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS,
READ_FIXED_LENGTH_CONTENT,
READ_FIXED_LENGTH_CONTENT_AS_CHUNKS,
READ_CHUNK_SIZE,
READ_CHUNKED_CONTENT,
READ_CHUNKED_CONTENT_AS_CHUNKS,
READ_CHUNK_DELIMITER,
READ_CHUNK_FOOTER,
BAD_MESSAGE
}
/**
* Creates a new instance with the default
* {@code maxInitialLineLength (4096)}, {@code maxHeaderSize (8192)}, and
* {@code maxChunkSize (8192)}.
*/
protected HttpMessageDecoder() {
this(4096, 8192, 8192);
}
/**
* Creates a new instance with the specified parameters.
*/
protected HttpMessageDecoder(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize) {
super(State.SKIP_CONTROL_CHARS);
if (maxInitialLineLength <= 0) {
throw new IllegalArgumentException(
"maxInitialLineLength must be a positive integer: " +
maxInitialLineLength);
}
if (maxHeaderSize <= 0) {
throw new IllegalArgumentException(
"maxHeaderSize must be a positive integer: " +
maxHeaderSize);
}
if (maxChunkSize < 0) {
throw new IllegalArgumentException(
"maxChunkSize must be a positive integer: " +
maxChunkSize);
}
this.maxInitialLineLength = maxInitialLineLength;
this.maxHeaderSize = maxHeaderSize;
this.maxChunkSize = maxChunkSize;
}
@Override
public Object decode(ChannelHandlerContext ctx, ByteBuf buffer) throws Exception {
switch (state()) {
case SKIP_CONTROL_CHARS: {
try {
skipControlCharacters(buffer);
checkpoint(State.READ_INITIAL);
} finally {
checkpoint();
}
}
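// Intentional fall-through: once the control characters are skipped, the initial
// line is parsed in the same decode() call (the same applies to the following
// READ_INITIAL -> READ_HEADER transition).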
case READ_INITIAL: try {
String[] initialLine = splitInitialLine(readLine(buffer, maxInitialLineLength));
if (initialLine.length < 3) {
// Invalid initial line - ignore.
checkpoint(State.SKIP_CONTROL_CHARS);
return null;
}
message = createMessage(initialLine);
checkpoint(State.READ_HEADER);
} catch (Exception e) {
return invalidMessage(e);
}
case READ_HEADER: try {
State nextState = readHeaders(buffer);
checkpoint(nextState);
if (nextState == State.READ_CHUNK_SIZE) {
// Chunked encoding - generate HttpMessage first. HttpChunks will follow.
return message;
}
if (nextState == State.SKIP_CONTROL_CHARS) {
// No content is expected.
return reset();
}
long contentLength = HttpHeaders.getContentLength(message, -1);
if (contentLength == 0 || contentLength == -1 && isDecodingRequest()) {
content = Unpooled.EMPTY_BUFFER;
return reset();
}
switch (nextState) {
case READ_FIXED_LENGTH_CONTENT:
if (contentLength > maxChunkSize || HttpHeaders.is100ContinueExpected(message)) {
// Generate HttpMessage first. HttpChunks will follow.
checkpoint(State.READ_FIXED_LENGTH_CONTENT_AS_CHUNKS);
message.setTransferEncoding(HttpTransferEncoding.STREAMED);
// chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT_AS_CHUNKS
// state reads data chunk by chunk.
chunkSize = HttpHeaders.getContentLength(message, -1);
return message;
}
break;
case READ_VARIABLE_LENGTH_CONTENT:
if (buffer.readableBytes() > maxChunkSize || HttpHeaders.is100ContinueExpected(message)) {
// Generate HttpMessage first. HttpChunks will follow.
checkpoint(State.READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS);
message.setTransferEncoding(HttpTransferEncoding.STREAMED);
return message;
}
break;
default:
throw new IllegalStateException("Unexpected state: " + nextState);
}
// Return null here; this forces decode() to be called again, where we will decode the content.
return null;
} catch (Exception e) {
return invalidMessage(e);
}
case READ_VARIABLE_LENGTH_CONTENT: {
int toRead = actualReadableBytes();
if (toRead > maxChunkSize) {
toRead = maxChunkSize;
}
if (message.getTransferEncoding() != HttpTransferEncoding.STREAMED) {
message.setTransferEncoding(HttpTransferEncoding.STREAMED);
return new Object[] { message, new DefaultHttpChunk(buffer.readBytes(toRead))};
} else {
return new DefaultHttpChunk(buffer.readBytes(toRead));
}
}
case READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS: {
// Keep reading data as a chunk until the end of connection is reached.
int toRead = actualReadableBytes();
if (toRead > maxChunkSize) {
toRead = maxChunkSize;
}
HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(toRead));
if (!buffer.readable()) {
// Reached the end of the connection.
reset();
if (!chunk.isLast()) {
// Append the last chunk.
return new Object[] { chunk, HttpChunk.LAST_CHUNK };
}
}
return chunk;
}
case READ_FIXED_LENGTH_CONTENT: {
return readFixedLengthContent(buffer);
}
case READ_FIXED_LENGTH_CONTENT_AS_CHUNKS: {
assert chunkSize <= Integer.MAX_VALUE;
int chunkSize = (int) this.chunkSize;
int readLimit = actualReadableBytes();
// Check if the buffer is readable first as we use the readable byte count
// to create the HttpChunk. This is needed as otherwise we may end up
// creating an HttpChunk instance that contains an empty buffer, which would
// be handled as if it were the last HttpChunk.
//
// See https://github.com/netty/netty/issues/433
if (readLimit == 0) {
return null;
}
int toRead = chunkSize;
if (toRead > maxChunkSize) {
toRead = maxChunkSize;
}
if (toRead > readLimit) {
toRead = readLimit;
}
HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(toRead));
if (chunkSize > toRead) {
chunkSize -= toRead;
} else {
chunkSize = 0;
}
this.chunkSize = chunkSize;
if (chunkSize == 0) {
// Read all content.
reset();
if (!chunk.isLast()) {
// Append the last chunk.
return new Object[] { chunk, HttpChunk.LAST_CHUNK };
}
}
return chunk;
}
/**
* Everything after this point handles chunked content: read the chunk size, read the chunk,
* read and ignore the trailing CRLF, and repeat until a chunk size of 0 is read.
*/
case READ_CHUNK_SIZE: try {
String line = readLine(buffer, maxInitialLineLength);
int chunkSize = getChunkSize(line);
this.chunkSize = chunkSize;
if (chunkSize == 0) {
checkpoint(State.READ_CHUNK_FOOTER);
return null;
} else if (chunkSize > maxChunkSize) {
// A chunk is too large. Split them into multiple chunks again.
checkpoint(State.READ_CHUNKED_CONTENT_AS_CHUNKS);
} else {
checkpoint(State.READ_CHUNKED_CONTENT);
}
} catch (Exception e) {
return invalidChunk(e);
}
case READ_CHUNKED_CONTENT: {
assert chunkSize <= Integer.MAX_VALUE;
HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes((int) chunkSize));
checkpoint(State.READ_CHUNK_DELIMITER);
return chunk;
}
case READ_CHUNKED_CONTENT_AS_CHUNKS: {
assert chunkSize <= Integer.MAX_VALUE;
int chunkSize = (int) this.chunkSize;
int readLimit = actualReadableBytes();
// Check if the buffer is readable first as we use the readable byte count
// to create the HttpChunk. This is needed as otherwise we may end up
// creating an HttpChunk instance that contains an empty buffer, which would
// be handled as if it were the last HttpChunk.
//
// See https://github.com/netty/netty/issues/433
if (readLimit == 0) {
return null;
}
int toRead = chunkSize;
if (toRead > maxChunkSize) {
toRead = maxChunkSize;
}
if (toRead > readLimit) {
toRead = readLimit;
}
HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(toRead));
if (chunkSize > toRead) {
chunkSize -= toRead;
} else {
chunkSize = 0;
}
this.chunkSize = chunkSize;
if (chunkSize == 0) {
// Read all content.
checkpoint(State.READ_CHUNK_DELIMITER);
}
if (!chunk.isLast()) {
return chunk;
}
}
case READ_CHUNK_DELIMITER: {
for (;;) {
byte next = buffer.readByte();
if (next == HttpConstants.CR) {
if (buffer.readByte() == HttpConstants.LF) {
checkpoint(State.READ_CHUNK_SIZE);
return null;
}
} else if (next == HttpConstants.LF) {
checkpoint(State.READ_CHUNK_SIZE);
return null;
} else {
checkpoint();
}
}
}
case READ_CHUNK_FOOTER: try {
HttpChunkTrailer trailer = readTrailingHeaders(buffer);
if (maxChunkSize == 0) {
// Chunked encoding disabled.
return reset();
} else {
reset();
// The last chunk, which is empty
return trailer;
}
} catch (Exception e) {
return invalidChunk(e);
}
case BAD_MESSAGE: {
// Keep discarding until disconnection.
buffer.skipBytes(actualReadableBytes());
return null;
}
default: {
throw new Error("Shouldn't reach here.");
}
}
}
protected boolean isContentAlwaysEmpty(HttpMessage msg) {
if (msg instanceof HttpResponse) {
HttpResponse res = (HttpResponse) msg;
int code = res.getStatus().getCode();
// Correctly handle return codes of 1xx.
//
// See:
// - http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html Section 4.4
// - https://github.com/netty/netty/issues/222
if (code >= 100 && code < 200) {
if (code == 101 && !res.containsHeader(HttpHeaders.Names.SEC_WEBSOCKET_ACCEPT)) {
// It's Hixie 76 websocket handshake response
return false;
}
return true;
}
switch (code) {
case 204: case 205: case 304:
return true;
}
}
return false;
}
private Object reset() {
HttpMessage message = this.message;
ByteBuf content = this.content;
if (content != null) {
message.setContent(content);
this.content = null;
}
this.message = null;
checkpoint(State.SKIP_CONTROL_CHARS);
return message;
}
private HttpMessage invalidMessage(Exception cause) {
checkpoint(State.BAD_MESSAGE);
if (message != null) {
message.setDecoderResult(DecoderResult.partialFailure(cause));
} else {
message = createInvalidMessage();
message.setDecoderResult(DecoderResult.failure(cause));
}
return message;
}
private HttpChunk invalidChunk(Exception cause) {
checkpoint(State.BAD_MESSAGE);
HttpChunk chunk = new DefaultHttpChunk(Unpooled.EMPTY_BUFFER);
chunk.setDecoderResult(DecoderResult.failure(cause));
return chunk;
}
private static void skipControlCharacters(ByteBuf buffer) {
for (;;) {
char c = (char) buffer.readUnsignedByte();
if (!Character.isISOControl(c) &&
!Character.isWhitespace(c)) {
buffer.readerIndex(buffer.readerIndex() - 1);
break;
}
}
}
private Object readFixedLengthContent(ByteBuf buffer) {
//we have a content-length so we just read the correct number of bytes
long length = HttpHeaders.getContentLength(message, -1);
assert length <= Integer.MAX_VALUE;
int toRead = (int) length - contentRead;
if (toRead > actualReadableBytes()) {
toRead = actualReadableBytes();
}
contentRead += toRead;
if (length < contentRead) {
if (message.getTransferEncoding() != HttpTransferEncoding.STREAMED) {
message.setTransferEncoding(HttpTransferEncoding.STREAMED);
return new Object[] {message, new DefaultHttpChunk(buffer.readBytes(toRead))};
} else {
return new DefaultHttpChunk(buffer.readBytes(toRead));
}
}
if (content == null) {
content = buffer.readBytes((int) length);
} else {
content.writeBytes(buffer.readBytes((int) length));
}
return reset();
}
private State readHeaders(ByteBuf buffer) {
headerSize = 0;
final HttpMessage message = this.message;
String line = readHeader(buffer);
String name = null;
String value = null;
if (!line.isEmpty()) {
message.clearHeaders();
do {
char firstChar = line.charAt(0);
if (name != null && (firstChar == ' ' || firstChar == '\t')) {
value = value + ' ' + line.trim();
} else {
if (name != null) {
message.addHeader(name, value);
}
String[] header = splitHeader(line);
name = header[0];
value = header[1];
}
line = readHeader(buffer);
} while (!line.isEmpty());
// Add the last header.
if (name != null) {
message.addHeader(name, value);
}
}
State nextState;
if (isContentAlwaysEmpty(message)) {
message.setTransferEncoding(HttpTransferEncoding.SINGLE);
nextState = State.SKIP_CONTROL_CHARS;
} else if (HttpCodecUtil.isTransferEncodingChunked(message)) {
message.setTransferEncoding(HttpTransferEncoding.CHUNKED);
nextState = State.READ_CHUNK_SIZE;
} else if (HttpHeaders.getContentLength(message, -1) >= 0) {
nextState = State.READ_FIXED_LENGTH_CONTENT;
} else {
nextState = State.READ_VARIABLE_LENGTH_CONTENT;
}
return nextState;
}
private HttpChunkTrailer readTrailingHeaders(ByteBuf buffer) {
headerSize = 0;
String line = readHeader(buffer);
String lastHeader = null;
if (!line.isEmpty()) {
HttpChunkTrailer trailer = new DefaultHttpChunkTrailer();
do {
char firstChar = line.charAt(0);
if (lastHeader != null && (firstChar == ' ' || firstChar == '\t')) {
List<String> current = trailer.getHeaders(lastHeader);
if (!current.isEmpty()) {
int lastPos = current.size() - 1;
String newString = current.get(lastPos) + line.trim();
current.set(lastPos, newString);
} else {
// Content-Length, Transfer-Encoding, or Trailer
}
} else {
String[] header = splitHeader(line);
String name = header[0];
if (!name.equalsIgnoreCase(HttpHeaders.Names.CONTENT_LENGTH) &&
!name.equalsIgnoreCase(HttpHeaders.Names.TRANSFER_ENCODING) &&
!name.equalsIgnoreCase(HttpHeaders.Names.TRAILER)) {
trailer.addHeader(name, header[1]);
}
lastHeader = name;
}
line = readHeader(buffer);
} while (!line.isEmpty());
return trailer;
}
return HttpChunk.LAST_CHUNK;
}
private String readHeader(ByteBuf buffer) {
StringBuilder sb = new StringBuilder(64);
int headerSize = this.headerSize;
loop:
for (;;) {
char nextByte = (char) buffer.readByte();
headerSize ++;
switch (nextByte) {
case HttpConstants.CR:
nextByte = (char) buffer.readByte();
headerSize ++;
if (nextByte == HttpConstants.LF) {
break loop;
}
break;
case HttpConstants.LF:
break loop;
}
// Abort decoding if the header part is too large.
if (headerSize >= maxHeaderSize) {
// TODO: Respond with Bad Request and discard the traffic
// or close the connection.
// No need to notify the upstream handlers - just log.
// If decoding a response, just throw an exception.
throw new TooLongFrameException(
"HTTP header is larger than " +
maxHeaderSize + " bytes.");
}
sb.append(nextByte);
}
this.headerSize = headerSize;
return sb.toString();
}
protected abstract boolean isDecodingRequest();
protected abstract HttpMessage createMessage(String[] initialLine) throws Exception;
protected abstract HttpMessage createInvalidMessage();
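// Parses the chunk-size line of a chunked transfer encoding, ignoring any chunk
// extension after ';' and trailing whitespace/control characters,
// e.g. "1a" -> 26, "1a; name=value" -> 26, "0" -> 0 (last chunk).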
private static int getChunkSize(String hex) {
hex = hex.trim();
for (int i = 0; i < hex.length(); i ++) {
char c = hex.charAt(i);
if (c == ';' || Character.isWhitespace(c) || Character.isISOControl(c)) {
hex = hex.substring(0, i);
break;
}
}
return Integer.parseInt(hex, 16);
}
private static String readLine(ByteBuf buffer, int maxLineLength) {
StringBuilder sb = new StringBuilder(64);
int lineLength = 0;
while (true) {
byte nextByte = buffer.readByte();
if (nextByte == HttpConstants.CR) {
nextByte = buffer.readByte();
if (nextByte == HttpConstants.LF) {
return sb.toString();
}
} else if (nextByte == HttpConstants.LF) {
return sb.toString();
} else {
if (lineLength >= maxLineLength) {
// TODO: Respond with Bad Request and discard the traffic
// or close the connection.
// No need to notify the upstream handlers - just log.
// If decoding a response, just throw an exception.
throw new TooLongFrameException(
"An HTTP line is larger than " + maxLineLength +
" bytes.");
}
lineLength ++;
sb.append((char) nextByte);
}
}
}
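// Splits the request/status line into its three parts,
// e.g. "GET /index.html HTTP/1.1" -> { "GET", "/index.html", "HTTP/1.1" }.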
private static String[] splitInitialLine(String sb) {
int aStart;
int aEnd;
int bStart;
int bEnd;
int cStart;
int cEnd;
aStart = findNonWhitespace(sb, 0);
aEnd = findWhitespace(sb, aStart);
bStart = findNonWhitespace(sb, aEnd);
bEnd = findWhitespace(sb, bStart);
cStart = findNonWhitespace(sb, bEnd);
cEnd = findEndOfString(sb);
return new String[] {
sb.substring(aStart, aEnd),
sb.substring(bStart, bEnd),
cStart < cEnd? sb.substring(cStart, cEnd) : "" };
}
private static String[] splitHeader(String sb) {
final int length = sb.length();
int nameStart;
int nameEnd;
int colonEnd;
int valueStart;
int valueEnd;
nameStart = findNonWhitespace(sb, 0);
for (nameEnd = nameStart; nameEnd < length; nameEnd ++) {
char ch = sb.charAt(nameEnd);
if (ch == ':' || Character.isWhitespace(ch)) {
break;
}
}
for (colonEnd = nameEnd; colonEnd < length; colonEnd ++) {
if (sb.charAt(colonEnd) == ':') {
colonEnd ++;
break;
}
}
valueStart = findNonWhitespace(sb, colonEnd);
if (valueStart == length) {
return new String[] {
sb.substring(nameStart, nameEnd),
""
};
}
valueEnd = findEndOfString(sb);
return new String[] {
sb.substring(nameStart, nameEnd),
sb.substring(valueStart, valueEnd)
};
}
private static int findNonWhitespace(String sb, int offset) {
int result;
for (result = offset; result < sb.length(); result ++) {
if (!Character.isWhitespace(sb.charAt(result))) {
break;
}
}
return result;
}
private static int findWhitespace(String sb, int offset) {
int result;
for (result = offset; result < sb.length(); result ++) {
if (Character.isWhitespace(sb.charAt(result))) {
break;
}
}
return result;
}
private static int findEndOfString(String sb) {
int result;
for (result = sb.length(); result > 0; result --) {
if (!Character.isWhitespace(sb.charAt(result - 1))) {
break;
}
}
return result;
}
}
| codec-http/src/main/java/io/netty/handler/codec/http/HttpMessageDecoder.java | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.DecoderResult;
import io.netty.handler.codec.ReplayingDecoder;
import io.netty.handler.codec.TooLongFrameException;
import java.util.List;
/**
* Decodes {@link ByteBuf}s into {@link HttpMessage}s and
* {@link HttpChunk}s.
*
* <h3>Parameters that prevent excessive memory consumption</h3>
* <table border="1">
* <tr>
* <th>Name</th><th>Meaning</th>
* </tr>
* <tr>
* <td>{@code maxInitialLineLength}</td>
* <td>The maximum length of the initial line
* (e.g. {@code "GET / HTTP/1.0"} or {@code "HTTP/1.0 200 OK"})
* If the length of the initial line exceeds this value, a
* {@link TooLongFrameException} will be raised.</td>
* </tr>
* <tr>
* <td>{@code maxHeaderSize}</td>
* <td>The maximum length of all headers. If the sum of the length of each
* header exceeds this value, a {@link TooLongFrameException} will be raised.</td>
* </tr>
* <tr>
* <td>{@code maxChunkSize}</td>
* <td>The maximum length of the content or each chunk. If the content length
* (or the length of each chunk) exceeds this value, the content or chunk
* will be split into multiple {@link HttpChunk}s whose length is
* {@code maxChunkSize} at maximum.</td>
* </tr>
* </table>
*
* <h3>Chunked Content</h3>
*
* If the content of an HTTP message is greater than {@code maxChunkSize} or
* the transfer encoding of the HTTP message is 'chunked', this decoder
* generates one {@link HttpMessage} instance and its following
* {@link HttpChunk}s per single HTTP message to avoid excessive memory
* consumption. For example, the following HTTP message:
* <pre>
* GET / HTTP/1.1
* Transfer-Encoding: chunked
*
* 1a
* abcdefghijklmnopqrstuvwxyz
* 10
* 1234567890abcdef
* 0
* Content-MD5: ...
* <i>[blank line]</i>
* </pre>
* triggers {@link HttpRequestDecoder} to generate 4 objects:
* <ol>
* <li>An {@link HttpRequest} whose {@link HttpMessage#isChunked() chunked}
* property is {@code true},</li>
* <li>The first {@link HttpChunk} whose content is {@code 'abcdefghijklmnopqrstuvwxyz'},</li>
* <li>The second {@link HttpChunk} whose content is {@code '1234567890abcdef'}, and</li>
* <li>An {@link HttpChunkTrailer} which marks the end of the content.</li>
* </ol>
*
* If you prefer not to handle {@link HttpChunk}s by yourself for your
* convenience, insert {@link HttpChunkAggregator} after this decoder in the
* {@link ChannelPipeline}. However, please note that your server might not
* be as memory efficient as without the aggregator.
*
* <h3>Extensibility</h3>
*
* Please note that this decoder is designed to be extended to implement
* a protocol derived from HTTP, such as
* <a href="http://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol">RTSP</a> and
* <a href="http://en.wikipedia.org/wiki/Internet_Content_Adaptation_Protocol">ICAP</a>.
* To implement the decoder of such a derived protocol, extend this class and
* implement all abstract methods properly.
* @apiviz.landmark
*/
public abstract class HttpMessageDecoder extends ReplayingDecoder<Object, HttpMessageDecoder.State> {
private final int maxInitialLineLength;
private final int maxHeaderSize;
private final int maxChunkSize;
private HttpMessage message;
private ByteBuf content;
private long chunkSize;
private int headerSize;
private int contentRead;
/**
* The internal state of {@link HttpMessageDecoder}.
* <em>Internal use only</em>.
* @apiviz.exclude
*/
protected enum State {
SKIP_CONTROL_CHARS,
READ_INITIAL,
READ_HEADER,
READ_VARIABLE_LENGTH_CONTENT,
READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS,
READ_FIXED_LENGTH_CONTENT,
READ_FIXED_LENGTH_CONTENT_AS_CHUNKS,
READ_CHUNK_SIZE,
READ_CHUNKED_CONTENT,
READ_CHUNKED_CONTENT_AS_CHUNKS,
READ_CHUNK_DELIMITER,
READ_CHUNK_FOOTER,
BAD_MESSAGE
}
/**
* Creates a new instance with the default
* {@code maxInitialLineLength (4096)}, {@code maxHeaderSize (8192)}, and
* {@code maxChunkSize (8192)}.
*/
protected HttpMessageDecoder() {
this(4096, 8192, 8192);
}
/**
* Creates a new instance with the specified parameters.
*/
protected HttpMessageDecoder(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize) {
super(State.SKIP_CONTROL_CHARS);
if (maxInitialLineLength <= 0) {
throw new IllegalArgumentException(
"maxInitialLineLength must be a positive integer: " +
maxInitialLineLength);
}
if (maxHeaderSize <= 0) {
throw new IllegalArgumentException(
"maxHeaderSize must be a positive integer: " +
maxHeaderSize);
}
if (maxChunkSize < 0) {
throw new IllegalArgumentException(
"maxChunkSize must be a positive integer: " +
maxChunkSize);
}
this.maxInitialLineLength = maxInitialLineLength;
this.maxHeaderSize = maxHeaderSize;
this.maxChunkSize = maxChunkSize;
}
@Override
public Object decode(ChannelHandlerContext ctx, ByteBuf buffer) throws Exception {
switch (state()) {
case SKIP_CONTROL_CHARS: {
try {
skipControlCharacters(buffer);
checkpoint(State.READ_INITIAL);
} finally {
checkpoint();
}
}
case READ_INITIAL: try {
String[] initialLine = splitInitialLine(readLine(buffer, maxInitialLineLength));
if (initialLine.length < 3) {
// Invalid initial line - ignore.
checkpoint(State.SKIP_CONTROL_CHARS);
return null;
}
message = createMessage(initialLine);
checkpoint(State.READ_HEADER);
} catch (Exception e) {
return invalidMessage(e);
}
case READ_HEADER: try {
State nextState = readHeaders(buffer);
checkpoint(nextState);
if (nextState == State.READ_CHUNK_SIZE) {
// Chunked encoding - generate HttpMessage first. HttpChunks will follow.
return message;
}
if (nextState == State.SKIP_CONTROL_CHARS) {
// No content is expected.
return reset();
}
long contentLength = HttpHeaders.getContentLength(message, -1);
if (contentLength == 0 || contentLength == -1 && isDecodingRequest()) {
content = Unpooled.EMPTY_BUFFER;
return reset();
}
switch (nextState) {
case READ_FIXED_LENGTH_CONTENT:
if (contentLength > maxChunkSize || HttpHeaders.is100ContinueExpected(message)) {
// Generate HttpMessage first. HttpChunks will follow.
checkpoint(State.READ_FIXED_LENGTH_CONTENT_AS_CHUNKS);
message.setTransferEncoding(HttpTransferEncoding.STREAMED);
// chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT_AS_CHUNKS
// state reads data chunk by chunk.
chunkSize = HttpHeaders.getContentLength(message, -1);
return message;
}
break;
case READ_VARIABLE_LENGTH_CONTENT:
if (buffer.readableBytes() > maxChunkSize || HttpHeaders.is100ContinueExpected(message)) {
// Generate HttpMessage first. HttpChunks will follow.
checkpoint(State.READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS);
message.setTransferEncoding(HttpTransferEncoding.STREAMED);
return message;
}
break;
default:
throw new IllegalStateException("Unexpected state: " + nextState);
}
// Return null here; this forces decode() to be called again, where we will decode the content.
return null;
} catch (Exception e) {
return invalidMessage(e);
}
case READ_VARIABLE_LENGTH_CONTENT: {
int toRead = actualReadableBytes();
if (toRead > maxChunkSize) {
toRead = maxChunkSize;
}
if (message.getTransferEncoding() != HttpTransferEncoding.STREAMED) {
message.setTransferEncoding(HttpTransferEncoding.STREAMED);
return new Object[] { message, new DefaultHttpChunk(buffer.readBytes(toRead))};
} else {
return new DefaultHttpChunk(buffer.readBytes(toRead));
}
}
case READ_VARIABLE_LENGTH_CONTENT_AS_CHUNKS: {
// Keep reading data as a chunk until the end of connection is reached.
int toRead = actualReadableBytes();
if (toRead > maxChunkSize) {
toRead = maxChunkSize;
}
HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(toRead));
if (!buffer.readable()) {
// Reached the end of the connection.
reset();
if (!chunk.isLast()) {
// Append the last chunk.
return new Object[] { chunk, HttpChunk.LAST_CHUNK };
}
}
return chunk;
}
case READ_FIXED_LENGTH_CONTENT: {
return readFixedLengthContent(buffer);
}
case READ_FIXED_LENGTH_CONTENT_AS_CHUNKS: {
assert chunkSize <= Integer.MAX_VALUE;
int chunkSize = (int) this.chunkSize;
int readLimit = actualReadableBytes();
// Check if the buffer is readable first as we use the readable byte count
// to create the HttpChunk. This is needed as otherwise we may end up
// creating an HttpChunk instance that contains an empty buffer, which would
// be handled as if it were the last HttpChunk.
//
// See https://github.com/netty/netty/issues/433
if (readLimit == 0) {
return null;
}
int toRead = chunkSize;
if (toRead > maxChunkSize) {
toRead = maxChunkSize;
}
if (toRead > readLimit) {
toRead = readLimit;
}
HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(toRead));
if (chunkSize > toRead) {
chunkSize -= toRead;
} else {
chunkSize = 0;
}
this.chunkSize = chunkSize;
if (chunkSize == 0) {
// Read all content.
reset();
if (!chunk.isLast()) {
// Append the last chunk.
return new Object[] { chunk, HttpChunk.LAST_CHUNK };
}
}
return chunk;
}
/**
* Everything after this point handles chunked content: read the chunk size, read the chunk,
* read and ignore the trailing CRLF, and repeat until a chunk size of 0 is read.
*/
case READ_CHUNK_SIZE: try {
String line = readLine(buffer, maxInitialLineLength);
int chunkSize = getChunkSize(line);
this.chunkSize = chunkSize;
if (chunkSize == 0) {
checkpoint(State.READ_CHUNK_FOOTER);
return null;
} else if (chunkSize > maxChunkSize) {
// A chunk is too large. Split them into multiple chunks again.
checkpoint(State.READ_CHUNKED_CONTENT_AS_CHUNKS);
} else {
checkpoint(State.READ_CHUNKED_CONTENT);
}
} catch (Exception e) {
return invalidChunk(e);
}
case READ_CHUNKED_CONTENT: {
assert chunkSize <= Integer.MAX_VALUE;
HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes((int) chunkSize));
checkpoint(State.READ_CHUNK_DELIMITER);
return chunk;
}
case READ_CHUNKED_CONTENT_AS_CHUNKS: {
assert chunkSize <= Integer.MAX_VALUE;
int chunkSize = (int) this.chunkSize;
int readLimit = actualReadableBytes();
// Check if the buffer is readable first as we use the readable byte count
// to create the HttpChunk. This is needed as otherwise we may end up
// creating an HttpChunk instance that contains an empty buffer, which would
// be handled as if it were the last HttpChunk.
//
// See https://github.com/netty/netty/issues/433
if (readLimit == 0) {
return null;
}
int toRead = chunkSize;
if (toRead > maxChunkSize) {
toRead = maxChunkSize;
}
if (toRead > readLimit) {
toRead = readLimit;
}
HttpChunk chunk = new DefaultHttpChunk(buffer.readBytes(toRead));
if (chunkSize > toRead) {
chunkSize -= toRead;
} else {
chunkSize = 0;
}
this.chunkSize = chunkSize;
if (chunkSize == 0) {
// Read all content.
checkpoint(State.READ_CHUNK_DELIMITER);
}
if (!chunk.isLast()) {
return chunk;
}
}
case READ_CHUNK_DELIMITER: {
for (;;) {
byte next = buffer.readByte();
if (next == HttpConstants.CR) {
if (buffer.readByte() == HttpConstants.LF) {
checkpoint(State.READ_CHUNK_SIZE);
return null;
}
} else if (next == HttpConstants.LF) {
checkpoint(State.READ_CHUNK_SIZE);
return null;
} else {
checkpoint();
}
}
}
case READ_CHUNK_FOOTER: try {
HttpChunkTrailer trailer = readTrailingHeaders(buffer);
if (maxChunkSize == 0) {
// Chunked encoding disabled.
return reset();
} else {
reset();
// The last chunk, which is empty
return trailer;
}
} catch (Exception e) {
return invalidChunk(e);
}
case BAD_MESSAGE: {
// Keep discarding until disconnection.
buffer.skipBytes(actualReadableBytes());
return null;
}
default: {
throw new Error("Shouldn't reach here.");
}
}
}
protected boolean isContentAlwaysEmpty(HttpMessage msg) {
if (msg instanceof HttpResponse) {
HttpResponse res = (HttpResponse) msg;
int code = res.getStatus().getCode();
// Correctly handle return codes of 1xx.
//
// See:
// - http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html Section 4.4
// - https://github.com/netty/netty/issues/222
if (code >= 100 && code < 200) {
if (code == 101 && !res.containsHeader(HttpHeaders.Names.SEC_WEBSOCKET_ACCEPT)) {
// It's a Hixie 76 WebSocket handshake response.
return false;
}
return true;
}
switch (code) {
case 204: case 205: case 304:
return true;
}
}
return false;
}
private Object reset() {
HttpMessage message = this.message;
ByteBuf content = this.content;
if (content != null) {
message.setContent(content);
this.content = null;
}
this.message = null;
checkpoint(State.SKIP_CONTROL_CHARS);
return message;
}
private HttpMessage invalidMessage(Exception cause) {
checkpoint(State.BAD_MESSAGE);
if (message != null) {
message.setDecoderResult(DecoderResult.partialFailure(cause));
} else {
message = createInvalidMessage();
message.setDecoderResult(DecoderResult.failure(cause));
}
return message;
}
private HttpChunk invalidChunk(Exception cause) {
checkpoint(State.BAD_MESSAGE);
HttpChunk chunk = new DefaultHttpChunk(Unpooled.EMPTY_BUFFER);
chunk.setDecoderResult(DecoderResult.failure(cause));
return chunk;
}
private static void skipControlCharacters(ByteBuf buffer) {
for (;;) {
char c = (char) buffer.readUnsignedByte();
if (!Character.isISOControl(c) &&
!Character.isWhitespace(c)) {
buffer.readerIndex(buffer.readerIndex() - 1);
break;
}
}
}
private Object readFixedLengthContent(ByteBuf buffer) {
// We have a Content-Length header, so we just read the correct number of bytes.
long length = HttpHeaders.getContentLength(message, -1);
assert length <= Integer.MAX_VALUE;
int toRead = (int) length - contentRead;
if (toRead > actualReadableBytes()) {
toRead = actualReadableBytes();
}
contentRead += toRead;
if (length < contentRead) {
if (message.getTransferEncoding() != HttpTransferEncoding.STREAMED) {
message.setTransferEncoding(HttpTransferEncoding.STREAMED);
return new Object[] {message, new DefaultHttpChunk(read(buffer, toRead))};
} else {
return new DefaultHttpChunk(read(buffer, toRead));
}
}
if (content == null) {
content = read(buffer, (int) length);
} else {
content.writeBytes(buffer.readBytes((int) length));
}
return reset();
}
/**
 * Try to do an optimized "read" of len bytes from the given {@link ByteBuf}.
 *
 * This is part of #412, which aims to avoid unnecessary byte copies.
 */
private ByteBuf read(ByteBuf buffer, int len) {
ByteBuf internal = internalBuffer();
if (internal.readableBytes() >= len) {
int index = internal.readerIndex();
ByteBuf buf = internal.slice(index, len);
// Update the reader index so the next read starts at the correct position.
buffer.readerIndex(index + len);
return buf;
} else {
return buffer.readBytes(len);
}
}
private State readHeaders(ByteBuf buffer) {
headerSize = 0;
final HttpMessage message = this.message;
String line = readHeader(buffer);
String name = null;
String value = null;
if (!line.isEmpty()) {
message.clearHeaders();
do {
char firstChar = line.charAt(0);
if (name != null && (firstChar == ' ' || firstChar == '\t')) {
value = value + ' ' + line.trim();
} else {
if (name != null) {
message.addHeader(name, value);
}
String[] header = splitHeader(line);
name = header[0];
value = header[1];
}
line = readHeader(buffer);
} while (!line.isEmpty());
// Add the last header.
if (name != null) {
message.addHeader(name, value);
}
}
State nextState;
if (isContentAlwaysEmpty(message)) {
message.setTransferEncoding(HttpTransferEncoding.SINGLE);
nextState = State.SKIP_CONTROL_CHARS;
} else if (HttpCodecUtil.isTransferEncodingChunked(message)) {
message.setTransferEncoding(HttpTransferEncoding.CHUNKED);
nextState = State.READ_CHUNK_SIZE;
} else if (HttpHeaders.getContentLength(message, -1) >= 0) {
nextState = State.READ_FIXED_LENGTH_CONTENT;
} else {
nextState = State.READ_VARIABLE_LENGTH_CONTENT;
}
return nextState;
}
private HttpChunkTrailer readTrailingHeaders(ByteBuf buffer) {
headerSize = 0;
String line = readHeader(buffer);
String lastHeader = null;
if (!line.isEmpty()) {
HttpChunkTrailer trailer = new DefaultHttpChunkTrailer();
do {
char firstChar = line.charAt(0);
if (lastHeader != null && (firstChar == ' ' || firstChar == '\t')) {
List<String> current = trailer.getHeaders(lastHeader);
if (!current.isEmpty()) {
int lastPos = current.size() - 1;
String newString = current.get(lastPos) + line.trim();
current.set(lastPos, newString);
} else {
// lastHeader was Content-Length, Transfer-Encoding, or Trailer; those are not added to the trailer, so the continuation line is dropped.
}
} else {
String[] header = splitHeader(line);
String name = header[0];
if (!name.equalsIgnoreCase(HttpHeaders.Names.CONTENT_LENGTH) &&
!name.equalsIgnoreCase(HttpHeaders.Names.TRANSFER_ENCODING) &&
!name.equalsIgnoreCase(HttpHeaders.Names.TRAILER)) {
trailer.addHeader(name, header[1]);
}
lastHeader = name;
}
line = readHeader(buffer);
} while (!line.isEmpty());
return trailer;
}
return HttpChunk.LAST_CHUNK;
}
private String readHeader(ByteBuf buffer) {
StringBuilder sb = new StringBuilder(64);
int headerSize = this.headerSize;
loop:
for (;;) {
char nextByte = (char) buffer.readByte();
headerSize ++;
switch (nextByte) {
case HttpConstants.CR:
nextByte = (char) buffer.readByte();
headerSize ++;
if (nextByte == HttpConstants.LF) {
break loop;
}
break;
case HttpConstants.LF:
break loop;
}
// Abort decoding if the header part is too large.
if (headerSize >= maxHeaderSize) {
// TODO: Respond with Bad Request and discard the traffic
// or close the connection.
// No need to notify the upstream handlers - just log.
// If decoding a response, just throw an exception.
throw new TooLongFrameException(
"HTTP header is larger than " +
maxHeaderSize + " bytes.");
}
sb.append(nextByte);
}
this.headerSize = headerSize;
return sb.toString();
}
protected abstract boolean isDecodingRequest();
protected abstract HttpMessage createMessage(String[] initialLine) throws Exception;
protected abstract HttpMessage createInvalidMessage();
private static int getChunkSize(String hex) {
hex = hex.trim();
for (int i = 0; i < hex.length(); i ++) {
char c = hex.charAt(i);
if (c == ';' || Character.isWhitespace(c) || Character.isISOControl(c)) {
hex = hex.substring(0, i);
break;
}
}
return Integer.parseInt(hex, 16);
}
private static String readLine(ByteBuf buffer, int maxLineLength) {
StringBuilder sb = new StringBuilder(64);
int lineLength = 0;
while (true) {
byte nextByte = buffer.readByte();
if (nextByte == HttpConstants.CR) {
nextByte = buffer.readByte();
if (nextByte == HttpConstants.LF) {
return sb.toString();
}
} else if (nextByte == HttpConstants.LF) {
return sb.toString();
} else {
if (lineLength >= maxLineLength) {
// TODO: Respond with Bad Request and discard the traffic
// or close the connection.
// No need to notify the upstream handlers - just log.
// If decoding a response, just throw an exception.
throw new TooLongFrameException(
"An HTTP line is larger than " + maxLineLength +
" bytes.");
}
lineLength ++;
sb.append((char) nextByte);
}
}
}
private static String[] splitInitialLine(String sb) {
int aStart;
int aEnd;
int bStart;
int bEnd;
int cStart;
int cEnd;
aStart = findNonWhitespace(sb, 0);
aEnd = findWhitespace(sb, aStart);
bStart = findNonWhitespace(sb, aEnd);
bEnd = findWhitespace(sb, bStart);
cStart = findNonWhitespace(sb, bEnd);
cEnd = findEndOfString(sb);
return new String[] {
sb.substring(aStart, aEnd),
sb.substring(bStart, bEnd),
cStart < cEnd? sb.substring(cStart, cEnd) : "" };
}
private static String[] splitHeader(String sb) {
final int length = sb.length();
int nameStart;
int nameEnd;
int colonEnd;
int valueStart;
int valueEnd;
nameStart = findNonWhitespace(sb, 0);
for (nameEnd = nameStart; nameEnd < length; nameEnd ++) {
char ch = sb.charAt(nameEnd);
if (ch == ':' || Character.isWhitespace(ch)) {
break;
}
}
for (colonEnd = nameEnd; colonEnd < length; colonEnd ++) {
if (sb.charAt(colonEnd) == ':') {
colonEnd ++;
break;
}
}
valueStart = findNonWhitespace(sb, colonEnd);
if (valueStart == length) {
return new String[] {
sb.substring(nameStart, nameEnd),
""
};
}
valueEnd = findEndOfString(sb);
return new String[] {
sb.substring(nameStart, nameEnd),
sb.substring(valueStart, valueEnd)
};
}
private static int findNonWhitespace(String sb, int offset) {
int result;
for (result = offset; result < sb.length(); result ++) {
if (!Character.isWhitespace(sb.charAt(result))) {
break;
}
}
return result;
}
private static int findWhitespace(String sb, int offset) {
int result;
for (result = offset; result < sb.length(); result ++) {
if (Character.isWhitespace(sb.charAt(result))) {
break;
}
}
return result;
}
private static int findEndOfString(String sb) {
int result;
for (result = sb.length(); result > 0; result --) {
if (!Character.isWhitespace(sb.charAt(result - 1))) {
break;
}
}
return result;
}
}
| Do not use slice() to get the content of HTTP msg
- Fixes #794
| codec-http/src/main/java/io/netty/handler/codec/http/HttpMessageDecoder.java | Do not use slice() to get the content of HTTP msg |
|
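Note on the commit record above ("Do not use slice() to get the content of HTTP msg", fixes #794): the read(ByteBuf, int) helper in the old contents returns internal.slice(index, len), a view backed by the decoder's internal cumulation buffer, so the message content can be corrupted once that buffer is reused. A minimal sketch of the copying alternative is shown below; the class and method names are illustrative assumptions, not the actual Netty change (which is not shown here).
import io.netty.buffer.ByteBuf;
final class CopyingRead {
    private CopyingRead() {
    }
    // Illustrative only: unlike slice(), readBytes(len) transfers the next len bytes
    // into a newly created buffer, so later reuse or release of 'in' cannot corrupt
    // the returned content.
    static ByteBuf readCopy(ByteBuf in, int len) {
        return in.readBytes(len); // also advances in.readerIndex() by len
    }
}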
Java | apache-2.0 | 0b7887b703d1f5cc0a8f1f5ad425ab6de0e64620 | 0 | codeabovelab/haven-platform,codeabovelab/haven-platform,codeabovelab/haven-platform,codeabovelab/haven-platform | /*
* Copyright 2016 Code Above Lab LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codeabovelab.dm.cluman.ui;
import com.codeabovelab.dm.cluman.cluster.docker.ClusterConfigImpl;
import com.codeabovelab.dm.cluman.cluster.docker.management.ApplicationService;
import com.codeabovelab.dm.cluman.cluster.docker.management.DockerService;
import com.codeabovelab.dm.cluman.cluster.docker.management.DockerUtils;
import com.codeabovelab.dm.cluman.cluster.docker.management.argument.GetContainersArg;
import com.codeabovelab.dm.cluman.cluster.registry.RegistryRepository;
import com.codeabovelab.dm.cluman.ds.DockerServiceRegistry;
import com.codeabovelab.dm.cluman.ds.clusters.RealCluster;
import com.codeabovelab.dm.cluman.ds.clusters.SwarmNodesGroupConfig;
import com.codeabovelab.dm.cluman.ds.container.ContainerStorage;
import com.codeabovelab.dm.cluman.ds.nodes.NodeStorage;
import com.codeabovelab.dm.cluman.job.JobInstance;
import com.codeabovelab.dm.cluman.model.*;
import com.codeabovelab.dm.cluman.security.AclContext;
import com.codeabovelab.dm.cluman.security.AclContextFactory;
import com.codeabovelab.dm.cluman.security.SecuredType;
import com.codeabovelab.dm.cluman.source.DeployOptions;
import com.codeabovelab.dm.cluman.source.SourceService;
import com.codeabovelab.dm.cluman.ui.model.*;
import com.codeabovelab.dm.cluman.validate.ExtendedAssert;
import com.codeabovelab.dm.cluman.yaml.YamlUtils;
import com.codeabovelab.dm.common.cache.DefineCache;
import com.codeabovelab.dm.common.cache.MessageBusCacheInvalidator;
import com.codeabovelab.dm.common.security.Authorities;
import com.codeabovelab.dm.common.utils.Sugar;
import io.swagger.annotations.ApiOperation;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.access.annotation.Secured;
import org.springframework.util.MimeTypeUtils;
import org.springframework.web.bind.annotation.*;
import java.util.*;
import java.util.stream.Collectors;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
import static org.springframework.web.bind.annotation.RequestMethod.*;
/**
* Rest controller for UI
*/
@RestController
@Slf4j
@RequestMapping(value = "/ui/api", produces = APPLICATION_JSON_VALUE)
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class ClusterApi {
private final DockerServiceRegistry dockerServiceRegistry;
private final RegistryRepository registryRepository;
private final NodeStorage nodeRegistry;
private final SourceService sourceService;
private final DiscoveryStorage discoveryStorage;
private final ApplicationService applicationService;
private final ContainerStorage containerStorage;
private final FilterApi filterApi;
private final AclContextFactory aclContextFactory;
@RequestMapping(value = "/clusters/", method = GET)
public List<UiCluster> listClusters() {
AclContext ac = aclContextFactory.getContext();
Collection<NodesGroup> clusters = this.discoveryStorage.getClusters();
List<UiCluster> ucs = clusters.stream().map(c -> this.toUi(ac, c)).collect(Collectors.toList());
ucs.sort(Comparator.naturalOrder());
return ucs;
}
@RequestMapping(value = "/cluster/{cluster}", method = GET)
public UiCluster getCluster(@PathVariable("cluster") String cluster) {
AclContext ac = aclContextFactory.getContext();
NodesGroup nodesGroup = discoveryStorage.getCluster(cluster);
ExtendedAssert.notFound(nodesGroup, "Cluster was not found by " + cluster);
return toUi(ac, nodesGroup);
}
private UiCluster toUi(AclContext ac, NodesGroup cluster) {
UiCluster uc = new UiCluster();
final String name = cluster.getName();
uc.setName(name);
uc.getTitle().accept(cluster.getTitle());
uc.getDescription().accept(cluster.getDescription());
uc.getFilter().accept(cluster.getImageFilter());
uc.setFeatures(cluster.getFeatures());
if (cluster.getConfig() instanceof SwarmNodesGroupConfig) {
SwarmNodesGroupConfig swarmNodesGroupConfig = (SwarmNodesGroupConfig) cluster.getConfig();
uc.setConfig(ClusterConfigImpl.builder(swarmNodesGroupConfig.getConfig()));
}
try {
DockerServiceInfo info = cluster.getDocker().getInfo();
uc.setContainers(new UiCluster.Entry(info.getContainers(), info.getOffContainers()));
uc.setNodes(new UiCluster.Entry(info.getNodeCount(), info.getOffNodeCount()));
} catch (AccessDeniedException e) {
uc.setContainers(new UiCluster.Entry(0, 0));
uc.setNodes(new UiCluster.Entry(0, 0));
//nothing
}
try {
Set<String> apps = uc.getApplications();
List<Application> applications = applicationService.getApplications(name);
applications.forEach(a -> apps.add(a.getName()));
} catch (Exception e) {
//nothing
}
UiPermission.inject(uc, ac, SecuredType.CLUSTER.id(name));
return uc;
}
@RequestMapping(value = "/clusters/{cluster}/containers", method = GET)
public ResponseEntity<Collection<UiContainer>> listContainers(@PathVariable("cluster") String cluster) {
AclContext ac = aclContextFactory.getContext();
List<UiContainer> list = new ArrayList<>();
GetContainersArg arg = new GetContainersArg(true);
NodesGroup nodesGroup = discoveryStorage.getCluster(cluster);
ExtendedAssert.notFound(nodesGroup, "Cluster was not found by " + cluster);
DockerService service = nodesGroup.getDocker();
Map<String, String> apps = UiUtils.mapAppContainer(applicationService, nodesGroup);
ExtendedAssert.notFound(service, "Service for " + cluster + " is null.");
List<DockerContainer> containers = service.getContainers(arg);
for (DockerContainer container : containers) {
UiContainer uic = UiContainer.from(container);
uic.enrich(discoveryStorage, containerStorage);
uic.setApplication(apps.get(uic.getId()));
UiPermission.inject(uic, ac, SecuredType.CONTAINER.id(uic.getId()));
list.add(uic);
}
Collections.sort(list);
return new ResponseEntity<>(list, HttpStatus.OK);
}
@RequestMapping(value = "/clusters/{cluster}/containers", method = PUT)
public ResponseEntity<Collection<UiContainer>> filteredListContainers(@PathVariable("cluster") String cluster,
@RequestBody UISearchQuery searchQuery) {
ResponseEntity<Collection<UiContainer>> listResponseEntity = listContainers(cluster);
Collection<UiContainer> body = listResponseEntity.getBody();
Collection<UiContainer> uiContainers = filterApi.listNodes(body, searchQuery);
return new ResponseEntity<>(uiContainers, HttpStatus.OK);
}
@RequestMapping(value = "/clusters/{cluster}/info", method = GET)
@Cacheable("SwarmInfo")
@DefineCache(
expireAfterWrite = 120_000,
invalidator = MessageBusCacheInvalidator.class,
invalidatorArgs = {
MessageBusCacheInvalidator.BUS_KEY, NodeEvent.BUS
}
)
public DockerServiceInfo info(@PathVariable("cluster") String cluster) {
return dockerServiceRegistry.getService(cluster).getInfo();
}
@RequestMapping(value = "/clusters/{cluster}/nodes-detailed", method = GET)
@Cacheable("UINode")
@DefineCache(
expireAfterWrite = 120_000,
invalidator = MessageBusCacheInvalidator.class,
invalidatorArgs = {
MessageBusCacheInvalidator.BUS_KEY, NodeEvent.BUS
}
)
public List<NodeInfo> listNodesDetailed(@PathVariable("cluster") String cluster) {
DockerServiceInfo info = dockerServiceRegistry.getService(cluster).getInfo();
return info.getNodeList();
}
@RequestMapping(value = "/clusters/{cluster}/nodes", method = GET)
public List<String> listNodes(@PathVariable("cluster") String cluster) {
DockerServiceInfo info = dockerServiceRegistry.getService(cluster).getInfo();
return DockerUtils.listNodes(info);
}
@ApiOperation("Add node to specified cluster. Node must be present in same environment wit cluster.")
@RequestMapping(value = "/clusters/{cluster}/nodes/{node}", method = POST)
public ResponseEntity<?> addNode(@PathVariable("cluster") String clusterId, @PathVariable("node") String node) {
// We set up the cluster first
NodesGroup cluster = discoveryStorage.getOrCreateCluster(clusterId, null);
// and then attach the node to it.
if (cluster.getFeatures().contains(NodesGroup.Feature.FORBID_NODE_ADDITION)) {
throw new HttpException(HttpStatus.BAD_REQUEST, "Cluster: " + clusterId + " does not allow addition of nodes.");
}
nodeRegistry.setNodeCluster(node, clusterId);
return new ResponseEntity<>(HttpStatus.OK);
}
@ApiOperation("Remove node from specified cluster. Also you can use 'all' cluster or any other - node will be correctly removed anyway.")
@RequestMapping(value = "/clusters/{cluster}/nodes/{node}", method = DELETE)
public ResponseEntity<?> removeNode(@PathVariable("cluster") String clusterId, @PathVariable("node") String node) {
nodeRegistry.setNodeCluster(node, null);
return new ResponseEntity<>(HttpStatus.OK);
}
@RequestMapping(value = "/clusters/{cluster}/registries", method = GET)
public List<String> getRegistriesForCluster(@PathVariable("cluster") String cluster) {
Collection<String> availableRegistries = registryRepository.getAvailableRegistries();
NodesGroup nodesGroup = discoveryStorage.getCluster(cluster);
List<String> registries = new ArrayList<>();
ExtendedAssert.notFound(nodesGroup, "Cluster was not found by " + cluster);
if (nodesGroup.getConfig() instanceof SwarmNodesGroupConfig) {
SwarmNodesGroupConfig swarmNodesGroupConfig = (SwarmNodesGroupConfig) nodesGroup.getConfig();
registries.addAll(swarmNodesGroupConfig.getConfig().getRegistries());
}
registries.retainAll(availableRegistries);
return registries;
}
@RequestMapping(value = "/clusters/{cluster}/source", method = GET, produces = YamlUtils.MIME_TYPE_VALUE)
public ResponseEntity<RootSource> getClusterSource(@PathVariable("cluster") String cluster) {
RootSource root = sourceService.getClusterSource(cluster);
ExtendedAssert.notFound(root, "Can not find cluster with name: " + cluster);
HttpHeaders headers = new HttpHeaders();
String confName = com.codeabovelab.dm.common.utils.StringUtils.retainForFileName(cluster);
if (confName.isEmpty()) {
confName = "config";
}
headers.set(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + confName + ".json\"");
return new ResponseEntity<>(root, headers, HttpStatus.OK);
}
@RequestMapping(value = "/clusters/{cluster}/source", method = POST, consumes = YamlUtils.MIME_TYPE_VALUE)
public UiJob setClusterSource(@PathVariable("cluster") String cluster,
DeployOptions.Builder options,
@RequestBody RootSource rootSource) {
return setRootSrc(cluster, options, rootSource);
}
@RequestMapping(value = "/clusters/{cluster}/source-upload", method = POST, consumes = MimeTypeUtils.MULTIPART_FORM_DATA_VALUE)
public UiJob uploadClusterSource(@PathVariable("cluster") String cluster,
DeployOptions.Builder options,
@RequestPart("file") RootSource rootSource) {
return setRootSrc(cluster, options, rootSource);
}
private UiJob setRootSrc(String cluster, DeployOptions.Builder options, RootSource rootSource) {
List<ClusterSource> clusters = rootSource.getClusters();
if (clusters.isEmpty()) {
throw new IllegalArgumentException("No clusters in source");
}
if (clusters.size() > 1) {
throw new IllegalArgumentException("Too many clusters in source, accept only one.");
}
// Update the cluster name, because the name from the path takes priority over the name from the source.
clusters.get(0).setName(cluster);
JobInstance jobInstance = sourceService.setRootSource(rootSource, options.build());
return UiJob.toUi(jobInstance);
}
@RequestMapping(value = "/clusters/{cluster}", method = DELETE)
public void deleteCluster(@PathVariable("cluster") String cluster) {
discoveryStorage.deleteCluster(cluster);
}
/**
* @param clusterName
* @param clusterData
*/
@Secured({Authorities.ADMIN_ROLE, SecuredType.CLUSTER_ADMIN})
@RequestMapping(value = "/clusters/{cluster}", method = PUT)
public void createCluster(@PathVariable("cluster") String clusterName, @RequestBody(required = false) UiClusterEditablePart clusterData) {
log.info("about to create cluster: [{}], {}", clusterName, clusterData);
SwarmNodesGroupConfig sgnc = new SwarmNodesGroupConfig();
sgnc.setName(clusterName);
ClusterConfigImpl.Builder ccib = ClusterConfigImpl.builder(RealCluster.getDefaultConfig(clusterName));
if (clusterData != null) {
ccib.merge(clusterData.getConfig());
}
sgnc.setConfig(ccib.build());
NodesGroup cluster = discoveryStorage.getOrCreateGroup(sgnc);
if (clusterData != null) {
Sugar.setIfChanged(cluster::setTitle, clusterData.getTitle());
Sugar.setIfChanged(cluster::setDescription, clusterData.getDescription());
Sugar.setIfChanged(cluster::setImageFilter, clusterData.getFilter());
// We cannot change the strategy of an already created cluster; maybe we need to restart swarm for that?
// cluster.setStrategy(clusterData.getStrategy());
}
log.info("Cluster created: {}", cluster);
cluster.flush();
}
}
| cluster-manager/src/main/java/com/codeabovelab/dm/cluman/ui/ClusterApi.java | /*
* Copyright 2016 Code Above Lab LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codeabovelab.dm.cluman.ui;
import com.codeabovelab.dm.cluman.cluster.docker.ClusterConfigImpl;
import com.codeabovelab.dm.cluman.cluster.docker.management.ApplicationService;
import com.codeabovelab.dm.cluman.cluster.docker.management.DockerService;
import com.codeabovelab.dm.cluman.cluster.docker.management.DockerUtils;
import com.codeabovelab.dm.cluman.cluster.docker.management.argument.GetContainersArg;
import com.codeabovelab.dm.cluman.cluster.registry.RegistryRepository;
import com.codeabovelab.dm.cluman.job.JobInstance;
import com.codeabovelab.dm.cluman.security.AclContext;
import com.codeabovelab.dm.cluman.security.AclContextFactory;
import com.codeabovelab.dm.cluman.security.SecuredType;
import com.codeabovelab.dm.cluman.source.DeployOptions;
import com.codeabovelab.dm.cluman.source.SourceService;
import com.codeabovelab.dm.cluman.ds.DockerServiceRegistry;
import com.codeabovelab.dm.cluman.ds.clusters.RealCluster;
import com.codeabovelab.dm.cluman.ds.clusters.SwarmNodesGroupConfig;
import com.codeabovelab.dm.cluman.ds.container.ContainerStorage;
import com.codeabovelab.dm.cluman.ds.nodes.NodeStorage;
import com.codeabovelab.dm.cluman.model.*;
import com.codeabovelab.dm.cluman.ui.model.*;
import com.codeabovelab.dm.cluman.validate.ExtendedAssert;
import com.codeabovelab.dm.cluman.yaml.YamlUtils;
import com.codeabovelab.dm.common.cache.DefineCache;
import com.codeabovelab.dm.common.cache.MessageBusCacheInvalidator;
import com.codeabovelab.dm.common.security.Authorities;
import com.codeabovelab.dm.common.utils.Sugar;
import io.swagger.annotations.ApiOperation;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.access.annotation.Secured;
import org.springframework.util.MimeTypeUtils;
import org.springframework.web.bind.annotation.*;
import java.util.*;
import java.util.stream.Collectors;
import static org.springframework.http.MediaType.APPLICATION_JSON_VALUE;
import static org.springframework.web.bind.annotation.RequestMethod.*;
/**
* Rest controller for UI
*/
@RestController
@Slf4j
@RequestMapping(value = "/ui/api", produces = APPLICATION_JSON_VALUE)
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class ClusterApi {
private final DockerServiceRegistry dockerServiceRegistry;
private final RegistryRepository registryRepository;
private final NodeStorage nodeRegistry;
private final SourceService sourceService;
private final DiscoveryStorage discoveryStorage;
private final ApplicationService applicationService;
private final ContainerStorage containerStorage;
private final FilterApi filterApi;
private final AclContextFactory aclContextFactory;
@RequestMapping(value = "/clusters/", method = GET)
public List<UiCluster> listClusters() {
AclContext ac = aclContextFactory.getContext();
Collection<NodesGroup> clusters = this.discoveryStorage.getClusters();
List<UiCluster> ucs = clusters.stream().map(c -> this.toUi(ac, c)).collect(Collectors.toList());
ucs.sort(Comparator.naturalOrder());
return ucs;
}
@RequestMapping(value = "/cluster/{cluster}", method = GET)
public UiCluster getCluster(@PathVariable("cluster") String cluster) {
AclContext ac = aclContextFactory.getContext();
NodesGroup nodesGroup = discoveryStorage.getCluster(cluster);
ExtendedAssert.notFound(nodesGroup, "Cluster was not found by " + cluster);
return toUi(ac, nodesGroup);
}
private UiCluster toUi(AclContext ac, NodesGroup cluster) {
UiCluster uc = new UiCluster();
final String name = cluster.getName();
uc.setName(name);
uc.getTitle().accept(cluster.getTitle());
uc.getDescription().accept(cluster.getDescription());
uc.getFilter().accept(cluster.getImageFilter());
uc.setFeatures(cluster.getFeatures());
if (cluster.getConfig() instanceof SwarmNodesGroupConfig) {
SwarmNodesGroupConfig swarmNodesGroupConfig = (SwarmNodesGroupConfig) cluster.getConfig();
uc.setConfig(ClusterConfigImpl.builder(swarmNodesGroupConfig.getConfig()));
}
try {
DockerServiceInfo info = cluster.getDocker().getInfo();
uc.setContainers(new UiCluster.Entry(info.getContainers(), info.getOffContainers()));
uc.setNodes(new UiCluster.Entry(info.getNodeCount(), info.getOffNodeCount()));
} catch (AccessDeniedException e) {
uc.setContainers(new UiCluster.Entry(0, 0));
uc.setNodes(new UiCluster.Entry(0, 0));
//nothing
}
try {
Set<String> apps = uc.getApplications();
List<Application> applications = applicationService.getApplications(name);
applications.forEach(a -> apps.add(a.getName()));
} catch (Exception e) {
//nothing
}
UiPermission.inject(uc, ac, SecuredType.CLUSTER.id(name));
return uc;
}
@RequestMapping(value = "/clusters/{cluster}/containers", method = GET)
public ResponseEntity<Collection<UiContainer>> listContainers(@PathVariable("cluster") String cluster) {
AclContext ac = aclContextFactory.getContext();
List<UiContainer> list = new ArrayList<>();
GetContainersArg arg = new GetContainersArg(true);
NodesGroup nodesGroup = discoveryStorage.getCluster(cluster);
ExtendedAssert.notFound(nodesGroup, "Cluster was not found by " + cluster);
DockerService service = nodesGroup.getDocker();
Map<String, String> apps = UiUtils.mapAppContainer(applicationService, nodesGroup);
ExtendedAssert.notFound(service, "Service for " + cluster + " is null.");
List<DockerContainer> containers = service.getContainers(arg);
for (DockerContainer container : containers) {
UiContainer uic = UiContainer.from(container);
uic.enrich(discoveryStorage, containerStorage);
uic.setApplication(apps.get(uic.getId()));
UiPermission.inject(uic, ac, SecuredType.CONTAINER.id(uic.getId()));
list.add(uic);
}
Collections.sort(list);
return new ResponseEntity<>(list, HttpStatus.OK);
}
@RequestMapping(value = "/clusters/{cluster}/containers", method = PUT)
public ResponseEntity<Collection<UiContainer>> filteredListContainers(@PathVariable("cluster") String cluster,
@RequestBody UISearchQuery searchQuery) {
ResponseEntity<Collection<UiContainer>> listResponseEntity = listContainers(cluster);
Collection<UiContainer> body = listResponseEntity.getBody();
Collection<UiContainer> uiContainers = filterApi.listNodes(body, searchQuery);
return new ResponseEntity<>(uiContainers, HttpStatus.OK);
}
@RequestMapping(value = "/clusters/{cluster}/info", method = GET)
@Cacheable("SwarmInfo")
@DefineCache(
expireAfterWrite = 120_000,
invalidator = MessageBusCacheInvalidator.class,
invalidatorArgs = {
MessageBusCacheInvalidator.BUS_KEY, NodeEvent.BUS
}
)
public DockerServiceInfo info(@PathVariable("cluster") String cluster) {
return dockerServiceRegistry.getService(cluster).getInfo();
}
@RequestMapping(value = "/clusters/{cluster}/nodes-detailed", method = GET)
@Cacheable("UINode")
@DefineCache(
expireAfterWrite = 120_000,
invalidator = MessageBusCacheInvalidator.class,
invalidatorArgs = {
MessageBusCacheInvalidator.BUS_KEY, NodeEvent.BUS
}
)
public List<NodeInfo> listNodesDetailed(@PathVariable("cluster") String cluster) {
DockerServiceInfo info = dockerServiceRegistry.getService(cluster).getInfo();
return info.getNodeList();
}
@RequestMapping(value = "/clusters/{cluster}/nodes", method = GET)
public List<String> listNodes(@PathVariable("cluster") String cluster) {
DockerServiceInfo info = dockerServiceRegistry.getService(cluster).getInfo();
return DockerUtils.listNodes(info);
}
@ApiOperation("Add node to specified cluster. Node must be present in same environment wit cluster.")
@RequestMapping(value = "/clusters/{cluster}/nodes/{node}", method = POST)
public ResponseEntity<?> addNode(@PathVariable("cluster") String clusterId, @PathVariable("node") String node) {
// We set up the cluster first
NodesGroup cluster = discoveryStorage.getOrCreateCluster(clusterId, null);
// and then attach the node to it.
if (cluster.getFeatures().contains(NodesGroup.Feature.FORBID_NODE_ADDITION)) {
throw new HttpException(HttpStatus.BAD_REQUEST, "Cluster: " + clusterId + " does not allow addition of nodes.");
}
nodeRegistry.setNodeCluster(node, clusterId);
return new ResponseEntity<>(HttpStatus.OK);
}
@ApiOperation("Remove node from specified cluster. Also you can use 'all' cluster or any other - node will be correctly removed anyway.")
@RequestMapping(value = "/clusters/{cluster}/nodes/{node}", method = DELETE)
public ResponseEntity<?> removeNode(@PathVariable("cluster") String clusterId, @PathVariable("node") String node) {
nodeRegistry.setNodeCluster(node, null);
return new ResponseEntity<>(HttpStatus.OK);
}
@RequestMapping(value = "/clusters/{cluster}/registries", method = GET)
public List<String> getRegistriesForCluster(@PathVariable("cluster") String cluster) {
Collection<String> availableRegistries = registryRepository.getAvailableRegistries();
DockerService service = dockerServiceRegistry.getService(cluster);
List<String> registries = service.getClusterConfig().getRegistries();
if (registries == null || registries.isEmpty()) {
return Collections.emptyList();
}
List<String> intersection = new ArrayList<>(availableRegistries);
intersection.retainAll(registries);
return intersection;
}
@RequestMapping(value = "/clusters/{cluster}/source", method = GET, produces = YamlUtils.MIME_TYPE_VALUE)
public ResponseEntity<RootSource> getClusterSource(@PathVariable("cluster") String cluster) {
RootSource root = sourceService.getClusterSource(cluster);
ExtendedAssert.notFound(root, "Can not find cluster with name: " + cluster);
HttpHeaders headers = new HttpHeaders();
String confName = com.codeabovelab.dm.common.utils.StringUtils.retainForFileName(cluster);
if (confName.isEmpty()) {
confName = "config";
}
headers.set(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + confName + ".json\"");
return new ResponseEntity<>(root, headers, HttpStatus.OK);
}
@RequestMapping(value = "/clusters/{cluster}/source", method = POST, consumes = YamlUtils.MIME_TYPE_VALUE)
public UiJob setClusterSource(@PathVariable("cluster") String cluster,
DeployOptions.Builder options,
@RequestBody RootSource rootSource) {
return setRootSrc(cluster, options, rootSource);
}
@RequestMapping(value = "/clusters/{cluster}/source-upload", method = POST, consumes = MimeTypeUtils.MULTIPART_FORM_DATA_VALUE)
public UiJob uploadClusterSource(@PathVariable("cluster") String cluster,
DeployOptions.Builder options,
@RequestPart("file") RootSource rootSource) {
return setRootSrc(cluster, options, rootSource);
}
private UiJob setRootSrc(String cluster, DeployOptions.Builder options, RootSource rootSource) {
List<ClusterSource> clusters = rootSource.getClusters();
if (clusters.isEmpty()) {
throw new IllegalArgumentException("No clusters in source");
}
if (clusters.size() > 1) {
throw new IllegalArgumentException("Too many clusters in source, accept only one.");
}
// Update the cluster name, because the name from the path takes priority over the name from the source.
clusters.get(0).setName(cluster);
JobInstance jobInstance = sourceService.setRootSource(rootSource, options.build());
return UiJob.toUi(jobInstance);
}
@RequestMapping(value = "/clusters/{cluster}", method = DELETE)
public void deleteCluster(@PathVariable("cluster") String cluster) {
discoveryStorage.deleteCluster(cluster);
}
/**
* @param clusterName
* @param clusterData
*/
@Secured({Authorities.ADMIN_ROLE, SecuredType.CLUSTER_ADMIN})
@RequestMapping(value = "/clusters/{cluster}", method = PUT)
public void createCluster(@PathVariable("cluster") String clusterName, @RequestBody(required = false) UiClusterEditablePart clusterData) {
log.info("about to create cluster: [{}], {}", clusterName, clusterData);
SwarmNodesGroupConfig sgnc = new SwarmNodesGroupConfig();
sgnc.setName(clusterName);
ClusterConfigImpl.Builder ccib = ClusterConfigImpl.builder(RealCluster.getDefaultConfig(clusterName));
if (clusterData != null) {
ccib.merge(clusterData.getConfig());
}
sgnc.setConfig(ccib.build());
NodesGroup cluster = discoveryStorage.getOrCreateGroup(sgnc);
if (clusterData != null) {
Sugar.setIfChanged(cluster::setTitle, clusterData.getTitle());
Sugar.setIfChanged(cluster::setDescription, clusterData.getDescription());
Sugar.setIfChanged(cluster::setImageFilter, clusterData.getFilter());
// We cannot change the strategy of an already created cluster; maybe we need to restart swarm for that?
// cluster.setStrategy(clusterData.getStrategy());
}
log.info("Cluster created: {}", cluster);
cluster.flush();
}
}
| fixed getRegistriesForCluster
| cluster-manager/src/main/java/com/codeabovelab/dm/cluman/ui/ClusterApi.java | fixed getRegistriesForCluster |
|
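Note on the commit record above ("fixed getRegistriesForCluster"): the new contents resolve the registry list from the cluster's SwarmNodesGroupConfig and then keep only the entries that are also reported as available, instead of asking the DockerService for its cluster config as the old contents did. The retainAll() intersection idiom it relies on is sketched below in isolation; the class and variable names are illustrative and are not part of the project.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
final class RegistryIntersection {
    private RegistryIntersection() {
    }
    // Keep only the configured registries that are actually available
    // (set intersection, preserving the order of the configured list).
    static List<String> availableOf(Collection<String> configured, Collection<String> available) {
        List<String> result = new ArrayList<>(configured);
        result.retainAll(available);
        return result;
    }
    public static void main(String[] args) {
        List<String> configured = Arrays.asList("registry.local", "docker.io", "legacy.registry");
        List<String> available = Arrays.asList("docker.io", "registry.local");
        System.out.println(availableOf(configured, available)); // prints [registry.local, docker.io]
    }
}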
Java | apache-2.0 | 9fa9e07682be519ead5e3210264fed4285b99f03 | 0 | ernestp/consulo,youdonghai/intellij-community,fnouama/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,izonder/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,petteyg/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,consulo/consulo,tmpgit/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,signed/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,xfournet/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,hurricup/intellij-community,fnouama/intellij-community,dslomov/intellij-community,izonder/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,allotria/intellij-community,petteyg/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,jexp/idea2,akosyakov/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,consulo/consulo,mglukhikh/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,izonder/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,robovm/robovm-studio,slisson/intellij-community,ibinti/intellij-community,apixandru/intellij-community,signed/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,caot/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,kool79/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,signed/intellij-community,lucafavatella
/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,izonder/intellij-community,ahb0327/intellij-community,consulo/consulo,suncycheng/intellij-community,dslomov/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,hurricup/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,kool79/intellij-community,wreckJ/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,apixandru/intellij-community,samthor/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,clumsy/intellij-community,FHannes/intellij-community,petteyg/intellij-community,ryano144/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,fnouama/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,kdwink/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,asedunov/intellij-community,xfournet/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,amith01994/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,semonte/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,allotria/intellij-community,ryano144/intellij-community,jagguli/intellij-community,asedunov/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,ernestp/consulo,hurricup/intellij-community,kool79/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,retomerz/intellij-community,holmes/intellij-community,da1z/intellij-community,kool79/intellij-community,signed/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,joewalnes/idea-community,clumsy/intellij-community,caot/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,vladmm/i
ntellij-community,signed/intellij-community,tmpgit/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,samthor/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,caot/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,hurricup/intellij-community,slisson/intellij-community,blademainer/intellij-community,FHannes/intellij-community,semonte/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,caot/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,xfournet/intellij-community,asedunov/intellij-community,slisson/intellij-community,consulo/consulo,MichaelNedzelsky/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,signed/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,ernestp/consulo,ryano144/intellij-community,ivan-fedorov/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,kool79/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,ernestp/consulo,robovm/robovm-studio,apixandru/intellij-community,adedayo/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,semonte/intellij-community,wreckJ/intellij-community,izonder/intellij-community,petteyg/intellij-community,asedunov/intellij-community,hurricup/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,slisson/intellij-community,allotria/intellij-community,vvv1559/intellij-community,holmes/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,allotria/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij
-community,amith01994/intellij-community,fitermay/intellij-community,apixandru/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,jexp/idea2,asedunov/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,semonte/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,signed/intellij-community,holmes/intellij-community,xfournet/intellij-community,da1z/intellij-community,signed/intellij-community,supersven/intellij-community,da1z/intellij-community,adedayo/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,izonder/intellij-community,consulo/consulo,fitermay/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,jagguli/intellij-community,jexp/idea2,samthor/intellij-community,asedunov/intellij-community,holmes/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,joewalnes/idea-community,slisson/intellij-community,fnouama/intellij-community,slisson/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,retomerz/intellij-community,kdwink/intellij-community,retomerz/intellij-community,slisson/intellij-community,supersven/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,holmes/intellij-community,akosyakov/intellij-community,caot/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,hurricup/intellij-community,dslomov/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,supersven/intellij-community,allotria/intellij-community,fitermay/intellij-community,vladmm/intellij-community,kdwink/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,ol-loginov/intellij
-community,FHannes/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,joewalnes/idea-community,retomerz/intellij-community,holmes/intellij-community,petteyg/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,supersven/intellij-community,xfournet/intellij-community,blademainer/intellij-community,amith01994/intellij-community,supersven/intellij-community,kool79/intellij-community,joewalnes/idea-community,lucafavatella/intellij-community,signed/intellij-community,fitermay/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,joewalnes/idea-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,consulo/consulo,akosyakov/intellij-community,allotria/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,kool79/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,da1z/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,jexp/idea2,jexp/idea2,jexp/idea2,Lekanich/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,supersven/intellij-community,holmes/intellij-community,supersven/intellij-community,robovm/robovm-studio,clumsy/intellij-community,semonte/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,vvv1559/intellij-community,kool79/intellij-community,FHannes/intellij-community,da1z/intellij-community,allotria/intellij-community,nicolargo/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,asedunov/intellij-community,fitermay/intellij-community,retomerz/intellij-community,retomerz/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,jagguli/intellij-community,blademainer/intellij-community,fitermay/intellij-community,robovm/robovm-studio,da1z/intellij-community,fengbaicanhe/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,supersven/intellij-community,retomerz/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,ernestp/consulo,MichaelNedzelsky/intellij-community,da1z/intellij-community,holmes/intellij-community,clumsy/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,wreckJ/intel
lij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,ibinti/intellij-community,caot/intellij-community,samthor/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,jexp/idea2,nicolargo/intellij-community,izonder/intellij-community,semonte/intellij-community,xfournet/intellij-community,vladmm/intellij-community,allotria/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,apixandru/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,signed/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,allotria/intellij-community,joewalnes/idea-community,suncycheng/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,signed/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,da1z/intellij-community,slisson/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,semonte/intellij-community,fnouama/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,ibinti/intellij-community,diorcety/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,caot/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,amith01994/intellij-community,supersven/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,holmes/intellij-community,ryano144/intellij-community,jagguli/intellij-community,samthor/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,joewalnes/idea-community,pwoodworth/intellij-community,vladmm/intellij-community,FHannes/intellij-community,signed/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,ibinti/intel
lij-community,muntasirsyed/intellij-community,ernestp/consulo,MER-GROUP/intellij-community,amith01994/intellij-community,robovm/robovm-studio,robovm/robovm-studio,signed/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,jexp/idea2,alphafoobar/intellij-community,kdwink/intellij-community,joewalnes/idea-community | package com.intellij.openapi.wm.impl;
import com.intellij.Patches;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ui.LafManager;
import com.intellij.ide.ui.LafManagerListener;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationAdapter;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.components.ProjectComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.*;
import com.intellij.openapi.wm.*;
import com.intellij.openapi.wm.ex.*;
import com.intellij.openapi.wm.impl.commands.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Alarm;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.containers.CollectionFactory;
import com.intellij.util.containers.HashMap;
import com.intellij.util.containers.HashSet;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.UiNotifyConnector;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.EventListenerList;
import javax.swing.event.HyperlinkListener;
import java.awt.*;
import java.awt.event.FocusEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.ref.WeakReference;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
public final class ToolWindowManagerImpl extends ToolWindowManagerEx implements ProjectComponent, JDOMExternalizable {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.ToolWindowManagerImpl");
private final Project myProject;
private final WindowManagerEx myWindowManager;
private final EventListenerList myListenerList;
private final DesktopLayout myLayout;
private final HashMap<String, InternalDecorator> myId2InternalDecorator;
private final HashMap<String, FloatingDecorator> myId2FloatingDecorator;
private final HashMap<String, StripeButton> myId2StripeButton;
private final HashMap<String, FocusWatcher> myId2FocusWatcher;
private final Set<String> myDumbAwareIds = CollectionFactory.newTroveSet();
private final EditorComponentFocusWatcher myEditorComponentFocusWatcher;
private final MyToolWindowPropertyChangeListener myToolWindowPropertyChangeListener;
private final InternalDecoratorListener myInternalDecoratorListener;
private final MyUIManagerPropertyChangeListener myUIManagerPropertyChangeListener;
private final MyLafManagerListener myLafManagerListener;
private boolean myEditorComponentActive;
private final ActiveStack myActiveStack;
private final SideStack mySideStack;
private ToolWindowsPane myToolWindowsPane;
private IdeFrameImpl myFrame;
private DesktopLayout myLayoutToRestoreLater = null;
@NonNls private static final String EDITOR_ELEMENT = "editor";
@NonNls private static final String ACTIVE_ATTR_VALUE = "active";
@NonNls private static final String FRAME_ELEMENT = "frame";
@NonNls private static final String X_ATTR = "x";
@NonNls private static final String Y_ATTR = "y";
@NonNls private static final String WIDTH_ATTR = "width";
@NonNls private static final String HEIGHT_ATTR = "height";
@NonNls private static final String EXTENDED_STATE_ATTR = "extended-state";
private final Alarm myFocusedComponentAlaram;
private final Alarm myForcedFocusRequestsAlarm;
private final Alarm myIdleAlarm;
private final Set<Runnable> myIdleRequests = new HashSet<Runnable>();
private final Runnable myFlushRunnable = new Runnable() {
public void run() {
flushIdleRequests();
}
};
private ActiveRunnable myRequestFocusCmd;
private WeakReference<FocusCommand> myLastForcedRequest = new WeakReference<FocusCommand>(null);
private Application myApp;
private AppListener myAppListener;
private FocusCommand myFocusCommandOnAppActivation;
private ActionCallback myCallbackOnActivation;
private WeakReference<Component> myFocusedComponentOnDeactivation;
private WeakReference<Component> myLastFocusedProjectComponent;
/**
* invoked by reflection
*/
public ToolWindowManagerImpl(final Project project, WindowManagerEx windowManagerEx, Application app) {
myProject = project;
myWindowManager = windowManagerEx;
myListenerList = new EventListenerList();
myLayout = new DesktopLayout();
myLayout.copyFrom(windowManagerEx.getLayout());
myId2InternalDecorator = new HashMap<String, InternalDecorator>();
myId2FloatingDecorator = new HashMap<String, FloatingDecorator>();
myId2StripeButton = new HashMap<String, StripeButton>();
myId2FocusWatcher = new HashMap<String, FocusWatcher>();
myEditorComponentFocusWatcher = new EditorComponentFocusWatcher();
myToolWindowPropertyChangeListener = new MyToolWindowPropertyChangeListener();
myInternalDecoratorListener = new MyInternalDecoratorListener();
myUIManagerPropertyChangeListener = new MyUIManagerPropertyChangeListener();
myLafManagerListener = new MyLafManagerListener();
myEditorComponentActive = false;
myActiveStack = new ActiveStack();
mySideStack = new SideStack();
myFocusedComponentAlaram = new Alarm(Alarm.ThreadToUse.SWING_THREAD, project);
myForcedFocusRequestsAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, project);
myIdleAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, project);
myApp = app;
myAppListener = new AppListener();
myApp.addApplicationListener(myAppListener);
IdeEventQueue.getInstance().addDispatcher(new IdeEventQueue.EventDispatcher() {
public boolean dispatch(AWTEvent e) {
if (e instanceof FocusEvent) {
final FocusEvent fe = (FocusEvent)e;
final Component c = fe.getComponent();
final IdeFrameImpl frame = myWindowManager.getFrame(myProject);
if (c instanceof Window || c == null || frame == null) return false;
if (isProjectComponent(c)) {
if (fe.getID() == FocusEvent.FOCUS_GAINED) {
myLastFocusedProjectComponent = new WeakReference<Component>(c);
}
}
}
return false;
}
}, myProject);
}
private Component getLastFocusedProjectComponent() {
return myLastFocusedProjectComponent != null ? myLastFocusedProjectComponent.get() : null;
}
public Project getProject() {
return myProject;
}
public void initComponent() {
}
public void disposeComponent() {
myApp.removeApplicationListener(myAppListener);
}
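/**
* Sets up the tool window UI for the project: creates the {@link ToolWindowsPane}, installs the
* editor component, listens for dumb mode to disable tool windows that are not dumb-aware, and
* registers extension-provided tool windows once post-startup activities run.
*/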
public void projectOpened() {
UIManager.addPropertyChangeListener(myUIManagerPropertyChangeListener);
LafManager.getInstance().addLafManagerListener(myLafManagerListener);
myFrame = myWindowManager.allocateFrame(myProject);
LOG.assertTrue(myFrame != null);
final ArrayList<FinalizableCommand> commandsList = new ArrayList<FinalizableCommand>();
myToolWindowsPane = new ToolWindowsPane(myFrame, this);
((IdeRootPane)myFrame.getRootPane()).setToolWindowsPane(myToolWindowsPane);
appendUpdateToolWindowsPaneCmd(commandsList);
myFrame.setTitle(FrameTitleBuilder.getInstance().getProjectTitle(myProject));
final JComponent editorComponent = FileEditorManagerEx.getInstanceEx(myProject).getComponent();
myEditorComponentFocusWatcher.install(editorComponent);
appendSetEditorComponentCmd(editorComponent, commandsList);
if (myEditorComponentActive) {
activateEditorComponentImpl(commandsList, true);
}
execute(commandsList);
final DumbService.DumbModeListener dumbModeListener = new DumbService.DumbModeListener() {
private final Set<String> hiddenIds = new THashSet<String>();
public void enteredDumbMode() {
}
public void beforeEnteringDumbMode() {
for (final String id : getToolWindowIds()) {
if (!myDumbAwareIds.contains(id)) {
if (isToolWindowVisible(id)) {
hiddenIds.add(id);
hideToolWindow(id, true);
}
getStripeButton(id).setEnabled(false);
}
}
}
public void exitDumbMode() {
for (final String id : getToolWindowIds()) {
getStripeButton(id).setEnabled(true);
}
for (final String id : hiddenIds) {
showToolWindow(id);
}
hiddenIds.clear();
}
};
myProject.getMessageBus().connect().subscribe(DumbService.DUMB_MODE, dumbModeListener);
StartupManager.getInstance(myProject).registerPostStartupActivity(new DumbAwareRunnable() {
public void run() {
registerToolWindowsFromBeans();
if (DumbService.getInstance().isDumb()) {
dumbModeListener.beforeEnteringDumbMode();
}
}
});
}
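/**
* Registers tool windows contributed via {@link ToolWindowEP} extensions. Each tool window is
* initially shown with a placeholder label; its real content is created lazily by the
* corresponding {@link ToolWindowFactory} the first time the tool window becomes visible.
*/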
private void registerToolWindowsFromBeans() {
ToolWindowEP[] beans = Extensions.getExtensions(ToolWindowEP.EP_NAME);
for (final ToolWindowEP bean : beans) {
final Condition condition = bean.getCondition();
if (condition != null && !condition.value(myProject)) {
continue;
}
ToolWindowAnchor toolWindowAnchor;
try {
toolWindowAnchor = ToolWindowAnchor.fromText(bean.anchor);
}
catch (Exception e) {
LOG.error(e);
continue;
}
JLabel label = new JLabel("Initializing toolwindow...");
final ToolWindow toolWindow = registerToolWindow(bean.id, label, toolWindowAnchor, myProject);
if (bean.icon != null) {
toolWindow.setIcon(IconLoader.getIcon(bean.icon));
}
toolWindow.setSplitMode(bean.secondary, null);
UiNotifyConnector.doWhenFirstShown(label, new Runnable() {
public void run() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
ToolWindowFactory factory = bean.getToolWindowFactory();
toolWindow.getContentManager().removeAllContents(false);
factory.createToolWindowContent(myProject, toolWindow);
}
});
}
});
}
}
public void projectClosed() {
UIManager.removePropertyChangeListener(myUIManagerPropertyChangeListener);
LafManager.getInstance().removeLafManagerListener(myLafManagerListener);
final ArrayList<FinalizableCommand> commandsList = new ArrayList<FinalizableCommand>();
final String[] ids = getToolWindowIds();
// Remove ToolWindowsPane
((IdeRootPane)myFrame.getRootPane()).setToolWindowsPane(null);
myWindowManager.releaseFrame(myFrame);
appendUpdateToolWindowsPaneCmd(commandsList);
// Hide all tool windows
for (final String id : ids) {
deactivateToolWindowImpl(id, true, commandsList);
}
// Remove editor component
final JComponent editorComponent = FileEditorManagerEx.getInstanceEx(myProject).getComponent();
myEditorComponentFocusWatcher.deinstall(editorComponent);
appendSetEditorComponentCmd(null, commandsList);
execute(commandsList);
}
public void addToolWindowManagerListener(final ToolWindowManagerListener l) {
myListenerList.add(ToolWindowManagerListener.class, l);
}
public void removeToolWindowManagerListener(final ToolWindowManagerListener l) {
myListenerList.remove(ToolWindowManagerListener.class, l);
}
/**
* This is a helper method. It delegates its functionality to the WindowManager.
* Before delegating, it fires a state-changed event.
*/
private void execute(final ArrayList<FinalizableCommand> commandList) {
fireStateChanged();
for (FinalizableCommand each : commandList) {
each.beforeExecute(this);
}
myWindowManager.getCommandProcessor().execute(commandList, myProject.getDisposed());
}
public void activateEditorComponent() {
activateEditorComponent(true);
}
private void activateEditorComponent(boolean forced) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: activateEditorComponent()");
}
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
activateEditorComponentImpl(commandList, forced);
execute(commandList);
}
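/**
* Requests focus in the most recently focused editor. When the request succeeds, all tool windows
* are deactivated and the active stack is cleared. If a forced request is rejected, the previously
* active tool window (or the top of the active stack) is re-activated instead.
*/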
private void activateEditorComponentImpl(final ArrayList<FinalizableCommand> commandList, final boolean forced) {
final String active = getActiveToolWindowId();
// Now we have to request focus into most recent focused editor
appendRequestFocusInEditorComponentCmd(commandList, forced).doWhenDone(new Runnable() {
public void run() {
final ArrayList<FinalizableCommand> postExecute = new ArrayList<FinalizableCommand>();
if (LOG.isDebugEnabled()) {
LOG.debug("editor activated");
}
deactivateWindows(postExecute, null);
myActiveStack.clear();
myEditorComponentActive = true;
execute(postExecute);
}
}).doWhenRejected(new Runnable() {
public void run() {
if (forced) {
requestFocus(new FocusCommand() {
public ActionCallback run() {
final ArrayList<FinalizableCommand> cmds = new ArrayList<FinalizableCommand>();
final WindowInfoImpl toReactivate = getInfo(active);
final boolean reactivateLastActive = toReactivate != null && !isToHide(toReactivate);
deactivateWindows(cmds, reactivateLastActive ? active : null);
execute(cmds);
if (reactivateLastActive) {
activateToolWindow(active, false, true);
}
else {
if (active != null) {
myActiveStack.remove(active, false);
}
if (!myActiveStack.isEmpty()) {
activateToolWindow(myActiveStack.peek(), false, true);
}
}
return new ActionCallback.Done();
}
}, false);
}
}
});
}
private void deactivateWindows(final ArrayList<FinalizableCommand> postExecute, String idToIgnore) {
final WindowInfoImpl[] infos = myLayout.getInfos();
for (final WindowInfoImpl info : infos) {
final boolean shouldHide = isToHide(info);
if (idToIgnore != null && idToIgnore.equals(info.getId())) {
continue;
}
deactivateToolWindowImpl(info.getId(), shouldHide, postExecute);
}
}
private boolean isToHide(final WindowInfoImpl info) {
return (info.isAutoHide() || info.isSliding()) && !(info.isFloating() && hasModalChild(info));
}
/**
* Helper method. It makes the window visible, activates it and requests focus into the tool window.
* But it doesn't deactivate other tool windows. Use the <code>prepareForActivation</code> method to
* deactivate other tool windows.
*
* @param dirtyMode if <code>true</code> then all UI operations are performed in "dirty" mode.
* It means that UI isn't validated and repainted just after each add/remove operation.
* @see ToolWindowManagerImpl#prepareForActivation
*/
private void showAndActivate(final String id,
final boolean dirtyMode,
final ArrayList<FinalizableCommand> commandsList,
boolean autoFocusContents) {
if (!getToolWindow(id).isAvailable()) {
return;
}
// show activated
final WindowInfoImpl info = getInfo(id);
boolean toApplyInfo = false;
if (!info.isActive()) {
info.setActive(true);
toApplyInfo = true;
}
showToolWindowImpl(id, dirtyMode, commandsList);
// activate
if (toApplyInfo) {
appendApplyWindowInfoCmd(info, commandsList);
myActiveStack.push(id);
myEditorComponentActive = false;
}
if (autoFocusContents) {
appendRequestFocusInToolWindowCmd(id, commandsList, true);
}
}
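/**
* Activates the tool window with the specified <code>id</code>. Does nothing in dumb mode
* unless the tool window has been registered as dumb-aware.
*/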
void activateToolWindow(final String id, boolean forced, boolean autoFocusContents) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: activateToolWindow(" + id + ")");
}
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
if (DumbService.getInstance().isDumb() && !myDumbAwareIds.contains(id)) {
return;
}
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
activateToolWindowImpl(id, commandList, forced, autoFocusContents);
execute(commandList);
}
private void activateToolWindowImpl(final String id,
final ArrayList<FinalizableCommand> commandList,
boolean forced,
boolean autoFocusContents) {
if (!isUnforcedRequestAllowed() && !forced) return;
if (LOG.isDebugEnabled()) {
LOG.debug("enter: activateToolWindowImpl(" + id + ")");
}
if (!getToolWindow(id).isAvailable()) {
// Tool window can be "logically" active but not focused. For example,
// when the user switched to another application. So we just need to bring
// tool window's window to front.
final InternalDecorator decorator = getInternalDecorator(id);
if (!decorator.hasFocus() && autoFocusContents) {
appendRequestFocusInToolWindowCmd(id, commandList, forced);
}
return;
}
prepareForActivation(id, commandList);
showAndActivate(id, false, commandList, autoFocusContents);
}
/**
* Checks whether the specified <code>id</code> defines an installed tool
* window. If it doesn't, an <code>IllegalStateException</code> is thrown.
*
* @throws IllegalStateException if tool window isn't installed.
*/
private void checkId(final String id) {
if (!myLayout.isToolWindowRegistered(id)) {
throw new IllegalStateException("window with id=\"" + id + "\" isn't registered");
}
}
/**
* Helper method. It deactivates (and hides) the window with the specified <code>id</code>.
*
* @param id <code>id</code> of the tool window to be deactivated.
* @param shouldHide if <code>true</code> then the specified tool window is also hidden.
*/
private void deactivateToolWindowImpl(final String id, final boolean shouldHide, final List<FinalizableCommand> commandsList) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: deactivateToolWindowImpl(" + id + "," + shouldHide + ")");
}
final WindowInfoImpl info = getInfo(id);
if (shouldHide && info.isVisible()) {
info.setVisible(false);
if (info.isFloating()) {
appendRemoveFloatingDecoratorCmd(info, commandsList);
}
else { // docked and sliding windows
appendRemoveDecoratorCmd(id, false, commandsList);
}
}
info.setActive(false);
appendApplyWindowInfoCmd(info, commandsList);
}
public String[] getToolWindowIds() {
ApplicationManager.getApplication().assertIsDispatchThread();
final WindowInfoImpl[] infos = myLayout.getInfos();
final String[] ids = ArrayUtil.newStringArray(infos.length);
for (int i = 0; i < infos.length; i++) {
ids[i] = infos[i].getId();
}
return ids;
}
public String getActiveToolWindowId() {
ApplicationManager.getApplication().assertIsDispatchThread();
return myLayout.getActiveId();
}
public String getLastActiveToolWindowId() {
return getLastActiveToolWindowId(null);
}
public String getLastActiveToolWindowId(Condition<JComponent> condition) {
ApplicationManager.getApplication().assertIsDispatchThread();
String lastActiveToolWindowId = null;
for (int i = 0; i < myActiveStack.getPersistentSize(); i++) {
final String id = myActiveStack.peekPersistent(i);
final ToolWindow toolWindow = getToolWindow(id);
LOG.assertTrue(toolWindow != null);
if (toolWindow.isAvailable()) {
if (condition == null || condition.value(toolWindow.getComponent())) {
lastActiveToolWindowId = id;
break;
}
}
}
return lastActiveToolWindowId;
}
/**
* @return floating decorator for the tool window with specified <code>ID</code>.
*/
private FloatingDecorator getFloatingDecorator(final String id) {
return myId2FloatingDecorator.get(id);
}
/**
* @return internal decorator for the tool window with specified <code>ID</code>.
*/
private InternalDecorator getInternalDecorator(final String id) {
return myId2InternalDecorator.get(id);
}
/**
* @return tool button for the window with specified <code>ID</code>.
*/
private StripeButton getStripeButton(final String id) {
return myId2StripeButton.get(id);
}
/**
* @return info for the tool window with specified <code>ID</code>.
*/
private WindowInfoImpl getInfo(final String id) {
return myLayout.getInfo(id, true);
}
public List<String> getIdsOn(final ToolWindowAnchor anchor) {
return myLayout.getVisibleIdsOn(anchor, this);
}
public ToolWindow getToolWindow(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
if (!myLayout.isToolWindowRegistered(id)) {
return null;
}
return getInternalDecorator(id).getToolWindow();
}
void showToolWindow(final String id) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: showToolWindow(" + id + ")");
}
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
showToolWindowImpl(id, false, commandList);
execute(commandList);
}
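/**
* Hides the tool window with the specified <code>id</code>. If <code>hideSide</code> is
* <code>true</code> (or the window is floating), all windows on the same anchor are hidden as well
* and the editor is activated. Otherwise a compatible window previously stored in the side stack
* is restored, and if the hidden window was active, the previous window from the active stack
* (or the editor) is activated.
*/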
public void hideToolWindow(final String id, final boolean hideSide) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (!info.isVisible()) return;
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
final boolean wasActive = info.isActive();
// hide and deactivate
deactivateToolWindowImpl(id, true, commandList);
if (hideSide || info.isFloating()) {
while (!mySideStack.isEmpty(info.getAnchor())) {
mySideStack.pop(info.getAnchor());
}
final String[] all = getToolWindowIds();
for (String eachId : all) {
final WindowInfoImpl eachInfo = getInfo(eachId);
if (eachInfo.isVisible() && eachInfo.getAnchor() == info.getAnchor()) {
deactivateToolWindowImpl(eachId, true, commandList);
}
}
activateEditorComponentImpl(commandList, true);
}
else {
// first of all we have to find tool window that was located at the same side and
// was hidden.
WindowInfoImpl info2 = null;
while (!mySideStack.isEmpty(info.getAnchor())) {
final WindowInfoImpl storedInfo = mySideStack.pop(info.getAnchor());
final WindowInfoImpl currentInfo = getInfo(storedInfo.getId());
LOG.assertTrue(currentInfo != null);
// SideStack contains copies of real WindowInfos. It means that
// these stored infos can be invalid. The following loop removes invalid WindowInfos.
if (storedInfo.getAnchor() == currentInfo.getAnchor() &&
storedInfo.getType() == currentInfo.getType() &&
storedInfo.isAutoHide() == currentInfo.isAutoHide()) {
info2 = storedInfo;
break;
}
}
if (info2 != null) {
showToolWindowImpl(info2.getId(), false, commandList);
}
// If we hide currently active tool window then we should activate the previous
// one which is located in the tool window stack.
// Activate another tool window if no active tool window exists and
// window stack is enabled.
myActiveStack.remove(id, false); // hidden window should be at the top of stack
if (wasActive) {
if (myActiveStack.isEmpty()) {
activateEditorComponentImpl(commandList, false);
}
else {
final String toBeActivatedId = myActiveStack.pop();
if (toBeActivatedId != null) {
activateToolWindowImpl(toBeActivatedId, commandList, false, true);
}
}
}
}
execute(commandList);
}
/**
* @param dirtyMode if <code>true</code> then all UI operations are performed in dirty mode.
*/
private void showToolWindowImpl(final String id, final boolean dirtyMode, final List<FinalizableCommand> commandsList) {
final WindowInfoImpl toBeShownInfo = getInfo(id);
if (toBeShownInfo.isVisible() || !getToolWindow(id).isAvailable()) {
return;
}
toBeShownInfo.setVisible(true);
final InternalDecorator decorator = getInternalDecorator(id);
if (toBeShownInfo.isFloating()) {
commandsList.add(new AddFloatingDecoratorCmd(decorator, toBeShownInfo));
}
else { // docked and sliding windows
// If there is a tool window on the same side then we have to hide it, i.e.
// clear the place for the tool window to be shown.
//
// We store the WindowInfo of the hidden tool window in the SideStack (if the tool window
// is docked and not an auto-hide one). Therefore it's possible to restore the
// hidden tool window when the currently shown tool window is closed.
final WindowInfoImpl[] infos = myLayout.getInfos();
for (final WindowInfoImpl info : infos) {
if (id.equals(info.getId())) {
continue;
}
if (info.isVisible() &&
info.getType() == toBeShownInfo.getType() &&
info.getAnchor() == toBeShownInfo.getAnchor() &&
info.isSplit() == toBeShownInfo.isSplit()) {
// hide and deactivate tool window
info.setVisible(false);
appendRemoveDecoratorCmd(info.getId(), false, commandsList);
if (info.isActive()) {
info.setActive(false);
}
appendApplyWindowInfoCmd(info, commandsList);
// store WindowInfo into the SideStack
if (info.isDocked() && !info.isAutoHide()) {
mySideStack.push(info);
}
}
}
appendAddDecoratorCmd(decorator, toBeShownInfo, dirtyMode, commandsList);
// Remove tool window from the SideStack.
mySideStack.remove(id);
}
appendApplyWindowInfoCmd(toBeShownInfo, commandsList);
}
public ToolWindow registerToolWindow(@NotNull final String id,
@NotNull final JComponent component,
@NotNull final ToolWindowAnchor anchor) {
return registerToolWindow(id, component, anchor, false, false, false);
}
public ToolWindow registerToolWindow(@NotNull final String id, final boolean canCloseContent, @NotNull final ToolWindowAnchor anchor) {
return registerToolWindow(id, null, anchor, false, canCloseContent, false);
}
public ToolWindow registerToolWindow(@NotNull final String id,
final boolean canCloseContent,
@NotNull final ToolWindowAnchor anchor,
final boolean sideTool) {
return registerToolWindow(id, null, anchor, sideTool, canCloseContent, false);
}
public ToolWindow registerToolWindow(@NotNull final String id, final boolean canCloseContent, @NotNull final ToolWindowAnchor anchor,
final Disposable parentDisposable, final boolean canWorkInDumbMode) {
return registerDisposable(id, parentDisposable, registerToolWindow(id, null, anchor, false, canCloseContent, canWorkInDumbMode));
}
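/**
* Registers a new tool window: creates its {@link WindowInfoImpl}, internal decorator, focus
* watcher and stripe button, then restores the previously recorded visible/active state for
* docked and floating windows that are not in auto-hide mode.
*/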
private ToolWindow registerToolWindow(@NotNull final String id,
@Nullable final JComponent component,
@NotNull final ToolWindowAnchor anchor,
boolean sideTool,
boolean canCloseContent, final boolean canWorkInDumbMode) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: installToolWindow(" + id + "," + component + "," + anchor + "\")");
}
ApplicationManager.getApplication().assertIsDispatchThread();
if (myLayout.isToolWindowRegistered(id)) {
throw new IllegalArgumentException("window with id=\"" + id + "\" is already registered");
}
final WindowInfoImpl info = myLayout.register(id, anchor, sideTool);
final boolean wasActive = info.isActive();
final boolean wasVisible = info.isVisible();
info.setActive(false);
info.setVisible(false);
// Create decorator
final ToolWindowImpl toolWindow = new ToolWindowImpl(this, id, canCloseContent, component);
final InternalDecorator decorator = new InternalDecorator(myProject, info.copy(), toolWindow);
myId2InternalDecorator.put(id, decorator);
decorator.addInternalDecoratorListener(myInternalDecoratorListener);
toolWindow.addPropertyChangeListener(myToolWindowPropertyChangeListener);
myId2FocusWatcher.put(id, new ToolWindowFocusWatcher(toolWindow));
if (canWorkInDumbMode) {
myDumbAwareIds.add(id);
}
// Create and show tool button
final StripeButton button = new StripeButton(decorator, myToolWindowsPane);
myId2StripeButton.put(id, button);
final ArrayList<FinalizableCommand> commandsList = new ArrayList<FinalizableCommand>();
appendAddButtonCmd(button, info, commandsList);
// If the preloaded info is visible or active then we have to show/activate the installed
// tool window. This step makes sense only for windows which are not in auto-hide
// mode. But if the tool window was active and its mode doesn't allow activating it again
// (for example, the tool window is in auto-hide mode) then we just activate the editor component.
if (!info.isAutoHide() && (info.isDocked() || info.isFloating())) {
if (wasActive) {
activateToolWindowImpl(info.getId(), commandsList, true, true);
}
else if (wasVisible) {
showToolWindowImpl(info.getId(), false, commandsList);
}
}
else if (wasActive) { // tool window was active but it cannot be activated again
activateEditorComponentImpl(commandsList, true);
}
execute(commandsList);
fireToolWindowRegistered(id);
return toolWindow;
}
public ToolWindow registerToolWindow(@NotNull final String id,
@NotNull JComponent component,
@NotNull ToolWindowAnchor anchor,
Disposable parentDisposable) {
return registerDisposable(id, parentDisposable, registerToolWindow(id, component, anchor));
}
private ToolWindow registerDisposable(final String id, final Disposable parentDisposable, final ToolWindow window) {
Disposer.register(parentDisposable, new Disposable() {
public void dispose() {
unregisterToolWindow(id);
}
});
return window;
}
public void unregisterToolWindow(@NotNull final String id) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: unregisterToolWindow(" + id + ")");
}
ApplicationManager.getApplication().assertIsDispatchThread();
if (!myLayout.isToolWindowRegistered(id)) {
return;
}
final WindowInfoImpl info = getInfo(id);
final ToolWindowEx toolWindow = (ToolWindowEx)getToolWindow(id);
// Save recent appearance of tool window
myLayout.unregister(id);
// Remove decorator and tool button from the screen
final ArrayList<FinalizableCommand> commandsList = new ArrayList<FinalizableCommand>();
if (info.isVisible()) {
info.setVisible(false);
if (info.isFloating()) {
appendRemoveFloatingDecoratorCmd(info, commandsList);
}
else { // docked and sliding windows
appendRemoveDecoratorCmd(id, false, commandsList);
}
}
appendRemoveButtonCmd(id, commandsList);
appendApplyWindowInfoCmd(info, commandsList);
execute(commandsList);
// Remove all references on tool window and save its last properties
toolWindow.removePropertyChangeListener(myToolWindowPropertyChangeListener);
myActiveStack.remove(id, true);
mySideStack.remove(id);
// Destroy stripe button
final StripeButton button = getStripeButton(id);
button.dispose();
myId2StripeButton.remove(id);
//
myId2FocusWatcher.remove(id);
// Destroy decorator
final InternalDecorator decorator = getInternalDecorator(id);
decorator.dispose();
decorator.removeInternalDecoratorListener(myInternalDecoratorListener);
myId2InternalDecorator.remove(id);
}
public DesktopLayout getLayout() {
ApplicationManager.getApplication().assertIsDispatchThread();
return myLayout;
}
public void setLayoutToRestoreLater(DesktopLayout layout) {
myLayoutToRestoreLater = layout;
}
public DesktopLayout getLayoutToRestoreLater() {
return myLayoutToRestoreLater;
}
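/**
* Applies the given layout to the currently registered tool windows: hides windows that are no
* longer visible, then updates anchors and orders, window types and auto-hide flags, and finally
* restores visibility. If no tool window is active afterwards and the editor is inactive, the
* editor component is activated.
*/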
public void setLayout(final DesktopLayout layout) {
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
// hide tool windows that are invisible in the new layout
final WindowInfoImpl[] currentInfos = myLayout.getInfos();
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (currentInfo.isVisible() && !info.isVisible()) {
deactivateToolWindowImpl(currentInfo.getId(), true, commandList);
}
}
// change anchor of tool windows
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (currentInfo.getAnchor() != info.getAnchor() || currentInfo.getOrder() != info.getOrder()) {
setToolWindowAnchorImpl(currentInfo.getId(), info.getAnchor(), info.getOrder(), commandList);
}
}
// change types of tool windows
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (currentInfo.getType() != info.getType()) {
setToolWindowTypeImpl(currentInfo.getId(), info.getType(), commandList);
}
}
// change auto-hide state
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (currentInfo.isAutoHide() != info.isAutoHide()) {
setToolWindowAutoHideImpl(currentInfo.getId(), info.isAutoHide(), commandList);
}
}
// restore visibility
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (info.isVisible()) {
showToolWindowImpl(currentInfo.getId(), false, commandList);
}
}
// if there is no active tool window and the editor is also inactive
// then activate the editor
if (!myEditorComponentActive && getActiveToolWindowId() == null) {
activateEditorComponentImpl(commandList, true);
}
execute(commandList);
}
public void invokeLater(final Runnable runnable) {
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
commandList.add(new InvokeLaterCmd(runnable, myWindowManager.getCommandProcessor()));
execute(commandList);
}
public IdeFocusManager getFocusManager() {
return IdeFocusManager.getInstance(myProject);
}
@Override
public void notifyByBalloon(@NotNull final String toolWindowId, @NotNull final MessageType type, @NotNull final String htmlBody) {
notifyByBalloon(toolWindowId, type, htmlBody, null, null);
}
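/**
* Shows a balloon notification anchored at the stripe button of the given tool window. The balloon
* position depends on the tool window's anchor; if the button is not currently showing, the balloon
* is shown at the corresponding edge of the tool windows pane instead.
*/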
public void notifyByBalloon(@NotNull final String toolWindowId,
final MessageType type,
@NotNull final String text,
@Nullable final Icon icon,
@Nullable HyperlinkListener listener) {
checkId(toolWindowId);
final Stripe stripe = myToolWindowsPane.getStripeFor(toolWindowId);
final ToolWindowImpl window = getInternalDecorator(toolWindowId).getToolWindow();
if (!window.isAvailable()) {
window.setPlaceholderMode(true);
stripe.updateState();
stripe.revalidate();
stripe.repaint();
}
final ToolWindowAnchor anchor = getInfo(toolWindowId).getAnchor();
final Ref<Balloon.Position> position = Ref.create(Balloon.Position.below);
if (ToolWindowAnchor.TOP == anchor) {
position.set(Balloon.Position.below);
}
else if (ToolWindowAnchor.BOTTOM == anchor) {
position.set(Balloon.Position.above);
}
else if (ToolWindowAnchor.LEFT == anchor) {
position.set(Balloon.Position.atRight);
}
else if (ToolWindowAnchor.RIGHT == anchor) {
position.set(Balloon.Position.atLeft);
}
Icon actualIcon = icon != null ? icon : type.getDefaultIcon();
final Balloon balloon =
JBPopupFactory.getInstance().createHtmlTextBalloonBuilder(text.replace("\n", "<br>"), actualIcon, type.getPopupBackground(), listener)
.createBalloon();
Disposer.register(balloon, new Disposable() {
public void dispose() {
window.setPlaceholderMode(false);
stripe.updateState();
stripe.revalidate();
stripe.repaint();
}
});
final StripeButton button = stripe.getButtonFor(toolWindowId);
if (button == null) return;
final Runnable show = new Runnable() {
public void run() {
if (button.isShowing()) {
final Point point = new Point(button.getBounds().width / 2, button.getHeight() / 2 - 2);
balloon.show(new RelativePoint(button, point), position.get());
}
else {
final Rectangle bounds = myToolWindowsPane.getBounds();
final Point target = UIUtil.getCenterPoint(bounds, new Dimension(1, 1));
if (ToolWindowAnchor.TOP == anchor) {
target.y = 0;
}
else if (ToolWindowAnchor.BOTTOM == anchor) {
target.y = bounds.height;
}
else if (ToolWindowAnchor.LEFT == anchor) {
target.x = 0;
}
else if (ToolWindowAnchor.RIGHT == anchor) {
target.x = bounds.width;
}
balloon.show(new RelativePoint(myToolWindowsPane, target), position.get());
}
}
};
if (!button.isValid()) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
show.run();
}
});
}
else {
show.run();
}
}
public boolean isEditorComponentActive() {
ApplicationManager.getApplication().assertIsDispatchThread();
return myEditorComponentActive;
}
ToolWindowAnchor getToolWindowAnchor(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).getAnchor();
}
void setToolWindowAnchor(final String id, final ToolWindowAnchor anchor) {
ApplicationManager.getApplication().assertIsDispatchThread();
setToolWindowAnchor(id, anchor, -1);
}
void setToolWindowAnchor(final String id, final ToolWindowAnchor anchor, final int order) {
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setToolWindowAnchorImpl(id, anchor, order, commandList);
execute(commandList);
}
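/**
* Moves the tool window to the given anchor/order. If the window is hidden, floating, or only its
* order changes, just the stripe button is re-added; for a visible docked or sliding window the
* decorator is removed and the window is shown again at the new anchor.
*/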
private void setToolWindowAnchorImpl(final String id,
final ToolWindowAnchor anchor,
final int order,
final ArrayList<FinalizableCommand> commandsList) {
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (anchor == info.getAnchor() && order == info.getOrder()) {
return;
}
// if tool window isn't visible or only order number is changed then just remove/add stripe button
if (!info.isVisible() || anchor == info.getAnchor() || info.isFloating()) {
appendRemoveButtonCmd(id, commandsList);
myLayout.setAnchor(id, anchor, order);
// update infos for all windows. Actually we only have to update the infos affected by
// the setAnchor method
final WindowInfoImpl[] infos = myLayout.getInfos();
for (WindowInfoImpl info1 : infos) {
appendApplyWindowInfoCmd(info1, commandsList);
}
appendAddButtonCmd(getStripeButton(id), info, commandsList);
}
else { // for docked and sliding windows we have to move buttons and window's decorators
info.setVisible(false);
appendRemoveDecoratorCmd(id, false, commandsList);
appendRemoveButtonCmd(id, commandsList);
myLayout.setAnchor(id, anchor, order);
// update infos for all windows. Actually we only have to update the infos affected by
// the setAnchor method
final WindowInfoImpl[] infos = myLayout.getInfos();
for (WindowInfoImpl info1 : infos) {
appendApplyWindowInfoCmd(info1, commandsList);
}
appendAddButtonCmd(getStripeButton(id), info, commandsList);
showToolWindowImpl(id, false, commandsList);
if (info.isActive()) {
appendRequestFocusInToolWindowCmd(id, commandsList, true);
}
}
}
boolean isSplitMode(String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isSplit();
}
void setSideTool(String id, boolean isSide) {
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setSplitModeImpl(id, isSide, commandList);
execute(commandList);
}
void setSideToolAndAnchor(String id, ToolWindowAnchor anchor, int order, boolean isSide) {
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setToolWindowAnchor(id, anchor, order);
setSplitModeImpl(id, isSide, commandList);
execute(commandList);
}
private void setSplitModeImpl(final String id, final boolean isSplit, final ArrayList<FinalizableCommand> commandList) {
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (isSplit == info.isSplit()) {
return;
}
myLayout.setSplitMode(id, isSplit);
boolean wasActive = info.isActive();
if (wasActive) {
deactivateToolWindowImpl(id, true, commandList);
}
final WindowInfoImpl[] infos = myLayout.getInfos();
for (WindowInfoImpl info1 : infos) {
appendApplyWindowInfoCmd(info1, commandList);
}
if (wasActive) {
activateToolWindowImpl(id, commandList, true, true);
}
commandList.add(myToolWindowsPane.createUpdateButtonPositionCmd(id, myWindowManager.getCommandProcessor()));
}
ToolWindowType getToolWindowInternalType(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).getInternalType();
}
ToolWindowType getToolWindowType(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).getType();
}
private void fireToolWindowRegistered(final String id) {
final ToolWindowManagerListener[] listeners = myListenerList.getListeners(ToolWindowManagerListener.class);
for (ToolWindowManagerListener listener : listeners) {
listener.toolWindowRegistered(id);
}
}
private void fireStateChanged() {
final ToolWindowManagerListener[] listeners = myListenerList.getListeners(ToolWindowManagerListener.class);
for (ToolWindowManagerListener listener : listeners) {
listener.stateChanged();
}
}
boolean isToolWindowActive(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isActive();
}
boolean isToolWindowAutoHide(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isAutoHide();
}
public boolean isToolWindowFloating(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isFloating();
}
boolean isToolWindowVisible(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isVisible();
}
void setToolWindowAutoHide(final String id, final boolean autoHide) {
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setToolWindowAutoHideImpl(id, autoHide, commandList);
execute(commandList);
}
private void setToolWindowAutoHideImpl(final String id, final boolean autoHide, final ArrayList<FinalizableCommand> commandsList) {
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (info.isAutoHide() == autoHide) {
return;
}
info.setAutoHide(autoHide);
appendApplyWindowInfoCmd(info, commandsList);
if (info.isVisible()) {
prepareForActivation(id, commandsList);
showAndActivate(id, false, commandsList, true);
}
}
void setToolWindowType(final String id, final ToolWindowType type) {
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setToolWindowTypeImpl(id, type, commandList);
execute(commandList);
}
private void setToolWindowTypeImpl(final String id, final ToolWindowType type, final ArrayList<FinalizableCommand> commandsList) {
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (info.getType() == type) {
return;
}
if (info.isVisible()) {
final boolean dirtyMode = info.isDocked() || info.isSliding();
info.setVisible(false);
if (info.isFloating()) {
appendRemoveFloatingDecoratorCmd(info, commandsList);
}
else { // docked and sliding windows
appendRemoveDecoratorCmd(id, dirtyMode, commandsList);
}
info.setType(type);
appendApplyWindowInfoCmd(info, commandsList);
prepareForActivation(id, commandsList);
showAndActivate(id, dirtyMode, commandsList, true);
appendUpdateToolWindowsPaneCmd(commandsList);
}
else {
info.setType(type);
appendApplyWindowInfoCmd(info, commandsList);
}
}
private void appendApplyWindowInfoCmd(final WindowInfoImpl info, final List<FinalizableCommand> commandsList) {
final StripeButton button = getStripeButton(info.getId());
final InternalDecorator decorator = getInternalDecorator(info.getId());
commandsList.add(new ApplyWindowInfoCmd(info, button, decorator, myWindowManager.getCommandProcessor()));
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createAddDecoratorCmd
*/
private void appendAddDecoratorCmd(final InternalDecorator decorator,
final WindowInfoImpl info,
final boolean dirtyMode,
final List<FinalizableCommand> commandsList) {
final CommandProcessor commandProcessor = myWindowManager.getCommandProcessor();
final FinalizableCommand command = myToolWindowsPane.createAddDecoratorCmd(decorator, info, dirtyMode, commandProcessor);
commandsList.add(command);
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createRemoveDecoratorCmd
*/
private void appendRemoveDecoratorCmd(final String id, final boolean dirtyMode, final List<FinalizableCommand> commandsList) {
final FinalizableCommand command = myToolWindowsPane.createRemoveDecoratorCmd(id, dirtyMode, myWindowManager.getCommandProcessor());
commandsList.add(command);
}
private void appendRemoveFloatingDecoratorCmd(final WindowInfoImpl info, final List<FinalizableCommand> commandsList) {
final RemoveFloatingDecoratorCmd command = new RemoveFloatingDecoratorCmd(info);
commandsList.add(command);
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createAddButtonCmd
*/
private void appendAddButtonCmd(final StripeButton button, final WindowInfoImpl info, final List<FinalizableCommand> commandsList) {
final Comparator comparator = myLayout.comparator(info.getAnchor());
final CommandProcessor commandProcessor = myWindowManager.getCommandProcessor();
final FinalizableCommand command = myToolWindowsPane.createAddButtonCmd(button, info, comparator, commandProcessor);
commandsList.add(command);
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createRemoveButtonCmd
*/
private void appendRemoveButtonCmd(final String id, final List<FinalizableCommand> commandsList) {
final FinalizableCommand command = myToolWindowsPane.createRemoveButtonCmd(id, myWindowManager.getCommandProcessor());
commandsList.add(command);
}
private ActionCallback appendRequestFocusInEditorComponentCmd(final ArrayList<FinalizableCommand> commandList, boolean forced) {
if (myProject.isDisposed()) return new ActionCallback.Done();
final CommandProcessor commandProcessor = myWindowManager.getCommandProcessor();
final RequestFocusInEditorComponentCmd command =
new RequestFocusInEditorComponentCmd(FileEditorManagerEx.getInstanceEx(myProject), commandProcessor, forced);
commandList.add(command);
return command.getDoneCallback();
}
private void appendRequestFocusInToolWindowCmd(final String id, final ArrayList<FinalizableCommand> commandList, boolean forced) {
final ToolWindowImpl toolWindow = (ToolWindowImpl)getToolWindow(id);
final FocusWatcher focusWatcher = myId2FocusWatcher.get(id);
commandList.add(new RequestFocusInToolWindowCmd(toolWindow, focusWatcher, myWindowManager.getCommandProcessor(), forced));
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createSetEditorComponentCmd
*/
private void appendSetEditorComponentCmd(final JComponent component, final List<FinalizableCommand> commandsList) {
final CommandProcessor commandProcessor = myWindowManager.getCommandProcessor();
final FinalizableCommand command = myToolWindowsPane.createSetEditorComponentCmd(component, commandProcessor);
commandsList.add(command);
}
private void appendUpdateToolWindowsPaneCmd(final List<FinalizableCommand> commandsList) {
final JRootPane rootPane = myFrame.getRootPane();
final FinalizableCommand command = new UpdateRootPaneCmd(rootPane, myWindowManager.getCommandProcessor());
commandsList.add(command);
}
/**
* @return <code>true</code> if the tool window described by the specified <code>info</code>
* is floating and has a showing modal child dialog. Such windows should not be closed
* when auto-hide windows are hidden.
*/
private boolean hasModalChild(final WindowInfoImpl info) {
if (!info.isVisible() || !info.isFloating()) {
return false;
}
final FloatingDecorator decorator = getFloatingDecorator(info.getId());
LOG.assertTrue(decorator != null);
return isModalOrHasModalChild(decorator);
}
private static boolean isModalOrHasModalChild(final Window window) {
if (window instanceof Dialog) {
final Dialog dialog = (Dialog)window;
if (dialog.isModal() && dialog.isShowing()) {
return true;
}
final Window[] ownedWindows = dialog.getOwnedWindows();
for (int i = ownedWindows.length - 1; i >= 0; i--) {
if (isModalOrHasModalChild(ownedWindows[i])) {
return true;
}
}
}
return false;
}
/**
* Helper method. It deactivates all tool windows except the tool window
* which should be activated.
*/
private void prepareForActivation(final String id, final List<FinalizableCommand> commandList) {
final WindowInfoImpl toBeActivatedInfo = getInfo(id);
final WindowInfoImpl[] infos = myLayout.getInfos();
for (final WindowInfoImpl info : infos) {
if (id.equals(info.getId())) {
continue;
}
if (toBeActivatedInfo.isDocked() || toBeActivatedInfo.isSliding()) {
deactivateToolWindowImpl(info.getId(), info.isAutoHide() || info.isSliding(), commandList);
}
else { // floating window is being activated
deactivateToolWindowImpl(info.getId(), info.isAutoHide() && info.isFloating() && !hasModalChild(info), commandList);
}
}
}
public void clearSideStack() {
mySideStack.clear();
}
public void readExternal(final Element element) {
for (final Object o : element.getChildren()) {
final Element e = (Element)o;
if (EDITOR_ELEMENT.equals(e.getName())) {
myEditorComponentActive = Boolean.valueOf(e.getAttributeValue(ACTIVE_ATTR_VALUE)).booleanValue();
}
else if (DesktopLayout.TAG.equals(e.getName())) { // read layout of tool windows
myLayout.readExternal(e);
}
}
}
public void writeExternal(final Element element) {
if (myFrame == null) {
// do nothing if the project was not opened
return;
}
final String[] ids = getToolWindowIds();
// Update size of all open floating windows. See SCR #18439
for (final String id : ids) {
final WindowInfoImpl info = getInfo(id);
if (info.isVisible()) {
final InternalDecorator decorator = getInternalDecorator(id);
LOG.assertTrue(decorator != null);
decorator.fireResized();
}
}
// Save frame's bounds
final Rectangle frameBounds = myFrame.getBounds();
final Element frameElement = new Element(FRAME_ELEMENT);
element.addContent(frameElement);
frameElement.setAttribute(X_ATTR, Integer.toString(frameBounds.x));
frameElement.setAttribute(Y_ATTR, Integer.toString(frameBounds.y));
frameElement.setAttribute(WIDTH_ATTR, Integer.toString(frameBounds.width));
frameElement.setAttribute(HEIGHT_ATTR, Integer.toString(frameBounds.height));
frameElement.setAttribute(EXTENDED_STATE_ATTR, Integer.toString(myFrame.getExtendedState()));
// Save whether editor is active or not
final Element editorElement = new Element(EDITOR_ELEMENT);
editorElement.setAttribute(ACTIVE_ATTR_VALUE, myEditorComponentActive ? Boolean.TRUE.toString() : Boolean.FALSE.toString());
element.addContent(editorElement);
// Save layout of tool windows
final Element layoutElement = new Element(DesktopLayout.TAG);
element.addContent(layoutElement);
myLayout.writeExternal(layoutElement);
}
public void setDefaultState(@NotNull final ToolWindowImpl toolWindow,
@Nullable final ToolWindowAnchor anchor,
@Nullable final ToolWindowType type,
@Nullable final Rectangle floatingBounds) {
final WindowInfoImpl info = getInfo(toolWindow.getId());
if (info.wasRead()) return;
if (floatingBounds != null) {
info.setFloatingBounds(floatingBounds);
}
if (anchor != null) {
toolWindow.setAnchor(anchor, null);
}
if (type != null) {
toolWindow.setType(type, null);
}
}
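/**
* Queues the runnable to be executed once focus activity has been idle for a short period;
* accumulated requests are flushed together by the idle alarm.
*/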
public void doWhenFocusSettlesDown(@NotNull final Runnable runnable) {
myIdleRequests.add(runnable);
if (myIdleAlarm.getActiveRequestCount() == 0) {
restartIdleAlarm();
}
}
private void restartIdleAlarm() {
myIdleAlarm.cancelAllRequests();
myIdleAlarm.addRequest(myFlushRunnable, 20);
}
private void flushIdleRequests() {
final Runnable[] all = myIdleRequests.toArray(new Runnable[myIdleRequests.size()]);
myIdleRequests.clear();
for (Runnable each : all) {
each.run();
}
}
public boolean isFocusTranferInProgress() {
return myRequestFocusCmd != null;
}
/**
* This command creates and shows a <code>FloatingDecorator</code>.
*/
private final class AddFloatingDecoratorCmd extends FinalizableCommand {
private final FloatingDecorator myFloatingDecorator;
/**
* Creates a floating decorator for the specified internal decorator.
*/
public AddFloatingDecoratorCmd(final InternalDecorator decorator, final WindowInfoImpl info) {
super(myWindowManager.getCommandProcessor());
myFloatingDecorator = new FloatingDecorator(myFrame, info.copy(), decorator);
myId2FloatingDecorator.put(info.getId(), myFloatingDecorator);
final Rectangle bounds = info.getFloatingBounds();
if (bounds != null &&
bounds.width > 0 &&
bounds.height > 0 &&
myWindowManager.isInsideScreenBounds(bounds.x, bounds.y, bounds.width)) {
myFloatingDecorator.setBounds(bounds);
}
else { // place new frame at the center of main frame if there are no floating bounds
Dimension size = decorator.getSize();
if (size.width == 0 || size.height == 0) {
size = decorator.getPreferredSize();
}
myFloatingDecorator.setSize(size);
myFloatingDecorator.setLocationRelativeTo(myFrame);
}
}
public void run() {
try {
myFloatingDecorator.show();
}
finally {
finish();
}
}
}
/**
* This command hides and destroys the floating decorator for the tool window
* with the specified <code>ID</code>.
*/
private final class RemoveFloatingDecoratorCmd extends FinalizableCommand {
private final FloatingDecorator myFloatingDecorator;
public RemoveFloatingDecoratorCmd(final WindowInfoImpl info) {
super(myWindowManager.getCommandProcessor());
myFloatingDecorator = getFloatingDecorator(info.getId());
myId2FloatingDecorator.remove(info.getId());
info.setFloatingBounds(myFloatingDecorator.getBounds());
}
public void run() {
try {
if (Patches.SPECIAL_WINPUT_METHOD_PROCESSING) {
myFloatingDecorator.remove(myFloatingDecorator.getRootPane());
}
myFloatingDecorator.dispose();
}
finally {
finish();
}
}
@Nullable
public Condition getExpireCondition() {
return Condition.FALSE;
}
}
private final class EditorComponentFocusWatcher extends FocusWatcher {
protected void focusedComponentChanged(final Component component, final AWTEvent cause) {
if (myWindowManager.getCommandProcessor().getCommandCount() > 0 || component == null) {
return;
}
// Sometimes a focus-gained event comes when the editor is already active. For example, it can happen when
// the user switches between menus or closes some dialog. In that case we just ignore this event,
// i.e. we don't initiate deactivation of tool windows and don't request focus in the editor.
if (myEditorComponentActive) {
return;
}
final KeyboardFocusManager mgr = KeyboardFocusManager.getCurrentKeyboardFocusManager();
final Component owner = mgr.getFocusOwner();
IdeFocusManager.getInstance(myProject).doWhenFocusSettlesDown(new Runnable() {
public void run() {
if (mgr.getFocusOwner() == owner) {
activateEditorComponent(false);
}
}
});
}
}
/**
* Notifies the window manager about focus traversal in a tool window.
*/
private final class ToolWindowFocusWatcher extends FocusWatcher {
private final String myId;
public ToolWindowFocusWatcher(final ToolWindowImpl toolWindow) {
myId = toolWindow.getId();
install(toolWindow.getComponent());
}
protected boolean isFocusedComponentChangeValid(final Component comp, final AWTEvent cause) {
return myWindowManager.getCommandProcessor().getCommandCount() == 0 && comp != null;
}
protected void focusedComponentChanged(final Component component, final AWTEvent cause) {
if (myWindowManager.getCommandProcessor().getCommandCount() > 0 || component == null) {
return;
}
final WindowInfoImpl info = getInfo(myId);
myFocusedComponentAlaram.cancelAllRequests();
if (!info.isActive()) {
myFocusedComponentAlaram.addRequest(new Runnable() {
public void run() {
if (!myLayout.isToolWindowRegistered(myId)) return;
activateToolWindow(myId, false, false);
}
}, 100);
}
}
}
/**
* Spies on IdeToolWindow properties and applies them to the window
* state.
*/
private final class MyToolWindowPropertyChangeListener implements PropertyChangeListener {
public void propertyChange(final PropertyChangeEvent e) {
final ToolWindowImpl toolWindow = (ToolWindowImpl)e.getSource();
if (ToolWindowEx.PROP_AVAILABLE.equals(e.getPropertyName())) {
final WindowInfoImpl info = getInfo(toolWindow.getId());
if (!toolWindow.isAvailable() && info.isVisible()) {
hideToolWindow(toolWindow.getId(), false);
}
}
}
}
/**
* Translates events from InternalDecorator into ToolWindowManager method invocations.
*/
private final class MyInternalDecoratorListener implements InternalDecoratorListener {
public void anchorChanged(final InternalDecorator source, final ToolWindowAnchor anchor) {
setToolWindowAnchor(source.getToolWindow().getId(), anchor);
}
public void autoHideChanged(final InternalDecorator source, final boolean autoHide) {
setToolWindowAutoHide(source.getToolWindow().getId(), autoHide);
}
public void hidden(final InternalDecorator source) {
hideToolWindow(source.getToolWindow().getId(), false);
}
public void hiddenSide(final InternalDecorator source) {
hideToolWindow(source.getToolWindow().getId(), true);
}
/**
* Handles events from the decorator and modifies the weight/floating bounds of the
* tool window depending on the decoration type.
*/
public void resized(final InternalDecorator source) {
final WindowInfoImpl info = getInfo(source.getToolWindow().getId());
if (info.isFloating()) {
final Window owner = SwingUtilities.getWindowAncestor(source);
if (owner != null) {
info.setFloatingBounds(owner.getBounds());
}
}
else { // docked and sliding windows
if (ToolWindowAnchor.TOP == info.getAnchor() || ToolWindowAnchor.BOTTOM == info.getAnchor()) {
info.setWeight((float)source.getHeight() / (float)myToolWindowsPane.getMyLayeredPane().getHeight());
float newSideWeight = (float)source.getWidth() / (float)myToolWindowsPane.getMyLayeredPane().getWidth();
if (newSideWeight < 1.0f) {
info.setSideWeight(newSideWeight);
}
}
else {
info.setWeight((float)source.getWidth() / (float)myToolWindowsPane.getMyLayeredPane().getWidth());
float newSideWeight = (float)source.getHeight() / (float)myToolWindowsPane.getMyLayeredPane().getHeight();
if (newSideWeight < 1.0f) {
info.setSideWeight(newSideWeight);
}
}
}
}
public void activated(final InternalDecorator source) {
activateToolWindow(source.getToolWindow().getId(), true, true);
}
public void typeChanged(final InternalDecorator source, final ToolWindowType type) {
setToolWindowType(source.getToolWindow().getId(), type);
}
public void sideStatusChanged(final InternalDecorator source, final boolean isSideTool) {
setSideTool(source.getToolWindow().getId(), isSideTool);
}
}
private void updateComponentTreeUI() {
ApplicationManager.getApplication().assertIsDispatchThread();
final WindowInfoImpl[] infos = myLayout.getInfos();
for (final WindowInfoImpl info : infos) {
if (info.isVisible()) { // skip visible tool windows (optimization)
continue;
}
SwingUtilities.updateComponentTreeUI(getInternalDecorator(info.getId()));
}
}
private final class MyUIManagerPropertyChangeListener implements PropertyChangeListener {
public void propertyChange(final PropertyChangeEvent e) {
updateComponentTreeUI();
}
}
private final class MyLafManagerListener implements LafManagerListener {
public void lookAndFeelChanged(final LafManager source) {
updateComponentTreeUI();
}
}
public WindowManagerEx getWindowManager() {
return myWindowManager;
}
@NotNull
public String getComponentName() {
return "ToolWindowManager";
}
public ToolWindowsPane getToolWindowsPane() {
return myToolWindowsPane;
}
public ActionCallback requestFocus(final Component c, final boolean forced) {
return requestFocus(new FocusCommand.ByComponent(c), forced);
}
public ActionCallback requestFocus(final FocusCommand command, final boolean forced) {
final ActionCallback result = new ActionCallback();
if (!forced) {
LaterInvocator.invokeLater(new Runnable() {
public void run() {
_requestFocus(command, forced, result);
}
});
}
else {
_requestFocus(command, forced, result);
}
return result;
}
private void _requestFocus(final FocusCommand command, final boolean forced, final ActionCallback result) {
if (checkForRejectOrByPass(command, forced, result)) return;
restartIdleAlarm();
myRequestFocusCmd = command;
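// A forced request becomes the "last effective forced request"; while it is in effect, unforced requests are
// rejected (see checkForRejectOrByPass). The 250 ms alarm scheduled after the command runs clears this state.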
if (forced) {
myForcedFocusRequestsAlarm.cancelAllRequests();
setLastEffectiveForcedRequest(command);
}
fixStickingDialogs();
SwingUtilities.invokeLater(new Runnable() {
public void run() {
if (checkForRejectOrByPass(command, forced, result)) return;
if (myRequestFocusCmd == command) {
myRequestFocusCmd = null;
command.run().doWhenDone(new Runnable() {
public void run() {
LaterInvocator.invokeLater(new Runnable() {
public void run() {
result.setDone();
}
});
}
}).doWhenRejected(new Runnable() {
public void run() {
result.setRejected();
}
});
restartIdleAlarm();
if (forced) {
myForcedFocusRequestsAlarm.addRequest(new Runnable() {
public void run() {
setLastEffectiveForcedRequest(null);
}
}, 250);
}
}
else {
rejectCommand(command, result);
}
}
});
}
private void fixStickingDialogs() {
if (!Patches.STICKY_DIALOGS) return;
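// Workaround for sticky dialogs (see Patches.STICKY_DIALOGS): if the "active" window is no longer showing,
// re-point the KeyboardFocusManager at its parent window via reflection, since setGlobalActiveWindow is not public API.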
final KeyboardFocusManager mgr = KeyboardFocusManager.getCurrentKeyboardFocusManager();
final Window wnd = mgr.getActiveWindow();
if (wnd != null && !wnd.isShowing() && wnd.getParent() instanceof Window) {
final Container parent = wnd.getParent();
final Method setActive = ReflectionUtil.findMethod(KeyboardFocusManager.class.getDeclaredMethods(), "setGlobalActiveWindow", Window.class);
if (setActive != null) {
try {
setActive.setAccessible(true);
setActive.invoke(mgr, (Window)parent);
}
catch (Exception e) {
LOG.info(e);
}
}
}
}
private boolean checkForRejectOrByPass(final FocusCommand cmd, final boolean forced, final ActionCallback result) {
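// A request is bypassed or rejected here when:
//  - the command has already expired;
//  - it is unforced while a forced request is still in effect (only the identical command is marked done);
//  - an earlier effective forced request dominates it;
//  - the application is inactive and the command cannot run then; it is stored and replayed on activation.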
if (cmd.isExpired()) {
rejectCommand(cmd, result);
return true;
}
final FocusCommand lastRequest = getLastEffectiveForcedRequest();
if (!forced && !isUnforcedRequestAllowed()) {
if (cmd.equals(lastRequest)) {
result.setDone();
}
else {
rejectCommand(cmd, result);
}
return true;
}
if (lastRequest != null && lastRequest.dominatesOver(cmd)) {
rejectCommand(cmd, result);
return true;
}
if (!myApp.isActive() && !canExecuteOnInactiveApplication(cmd)) {
if (myCallbackOnActivation != null) {
myCallbackOnActivation.setRejected();
}
myFocusCommandOnAppActivation = cmd;
myCallbackOnActivation = result;
return true;
}
return false;
}
private void rejectCommand(FocusCommand cmd, ActionCallback callback) {
if (myRequestFocusCmd == cmd) {
resetCurrentCommand();
}
callback.setRejected();
}
private void resetCurrentCommand() {
myRequestFocusCmd = null;
}
private boolean canExecuteOnInactiveApplication(FocusCommand cmd) {
return !Patches.REQUEST_FOCUS_MAY_ACTIVATE_APP || cmd.canExecuteOnInactiveApp();
}
private void setLastEffectiveForcedRequest(FocusCommand command) {
myLastForcedRequest = new WeakReference<FocusCommand>(command);
}
@Nullable
private FocusCommand getLastEffectiveForcedRequest() {
if (myLastForcedRequest == null) return null;
final FocusCommand request = myLastForcedRequest.get();
return request != null && !request.isExpired() ? request : null;
}
private boolean isUnforcedRequestAllowed() {
return getLastEffectiveForcedRequest() == null;
}
private boolean isProjectComponent(Component c) {
final Component frame = UIUtil.findUltimateParent(c);
if (frame instanceof IdeFrame) {
return frame == myWindowManager.getFrame(myProject);
} else {
return false;
}
}
private class AppListener extends ApplicationAdapter {
@Override
public void applicationDeactivated(IdeFrame ideFrame) {
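// Remember which project component owned focus so that applicationActivated() can restore focus to it later.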
Component c = getLastFocusedProjectComponent();
if (c == null) {
final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
if (isProjectComponent(owner)) {
c = owner;
}
}
myFocusedComponentOnDeactivation = c != null ? new WeakReference<Component>(c) : null;
}
@Override
public void applicationActivated(IdeFrame ideFrame) {
final FocusCommand cmd = myFocusCommandOnAppActivation;
ActionCallback callback = myCallbackOnActivation;
myFocusCommandOnAppActivation = null;
myCallbackOnActivation = null;
if (cmd != null && !cmd.isExpired()) {
requestFocus(cmd, true).notifyWhenDone(callback);
} else {
final KeyboardFocusManager mgr = KeyboardFocusManager.getCurrentKeyboardFocusManager();
if (ideFrame == myWindowManager.getFrame(myProject)) {
final Component owner = mgr.getFocusOwner();
Component old = myFocusedComponentOnDeactivation != null ? myFocusedComponentOnDeactivation.get() : null;
if (old == null || !old.isShowing()) {
old = IdeFocusTraversalPolicy.getPreferredFocusedComponent(((IdeFrameImpl)ideFrame).getRootPane());
}
if (owner == null && old != null && old.isShowing()) {
requestFocus(old, false);
}
myFocusedComponentOnDeactivation = null;
}
}
}
}
}
| platform-impl/src/com/intellij/openapi/wm/impl/ToolWindowManagerImpl.java | package com.intellij.openapi.wm.impl;
import com.intellij.Patches;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ui.LafManager;
import com.intellij.ide.ui.LafManagerListener;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationAdapter;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.components.ProjectComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.*;
import com.intellij.openapi.wm.*;
import com.intellij.openapi.wm.ex.*;
import com.intellij.openapi.wm.impl.commands.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Alarm;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.containers.CollectionFactory;
import com.intellij.util.containers.HashMap;
import com.intellij.util.containers.HashSet;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.UiNotifyConnector;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.EventListenerList;
import javax.swing.event.HyperlinkListener;
import java.awt.*;
import java.awt.event.FocusEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.ref.WeakReference;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
public final class ToolWindowManagerImpl extends ToolWindowManagerEx implements ProjectComponent, JDOMExternalizable {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.ToolWindowManagerImpl");
private final Project myProject;
private final WindowManagerEx myWindowManager;
private final EventListenerList myListenerList;
private final DesktopLayout myLayout;
private final HashMap<String, InternalDecorator> myId2InternalDecorator;
private final HashMap<String, FloatingDecorator> myId2FloatingDecorator;
private final HashMap<String, StripeButton> myId2StripeButton;
private final HashMap<String, FocusWatcher> myId2FocusWatcher;
private final Set<String> myDumbAwareIds = CollectionFactory.newTroveSet();
private final EditorComponentFocusWatcher myEditorComponentFocusWatcher;
private final MyToolWindowPropertyChangeListener myToolWindowPropertyChangeListener;
private final InternalDecoratorListener myInternalDecoratorListener;
private final MyUIManagerPropertyChangeListener myUIManagerPropertyChangeListener;
private final MyLafManagerListener myLafManagerListener;
private boolean myEditorComponentActive;
private final ActiveStack myActiveStack;
private final SideStack mySideStack;
private ToolWindowsPane myToolWindowsPane;
private IdeFrameImpl myFrame;
private DesktopLayout myLayoutToRestoreLater = null;
@NonNls private static final String EDITOR_ELEMENT = "editor";
@NonNls private static final String ACTIVE_ATTR_VALUE = "active";
@NonNls private static final String FRAME_ELEMENT = "frame";
@NonNls private static final String X_ATTR = "x";
@NonNls private static final String Y_ATTR = "y";
@NonNls private static final String WIDTH_ATTR = "width";
@NonNls private static final String HEIGHT_ATTR = "height";
@NonNls private static final String EXTENDED_STATE_ATTR = "extended-state";
private final Alarm myFocusedComponentAlaram;
private final Alarm myForcedFocusRequestsAlarm;
private final Alarm myIdleAlarm;
private final Set<Runnable> myIdleRequests = new HashSet<Runnable>();
private final Runnable myFlushRunnable = new Runnable() {
public void run() {
flushIdleRequests();
}
};
private ActiveRunnable myRequestFocusCmd;
private WeakReference<FocusCommand> myLastForcedRequest = new WeakReference<FocusCommand>(null);
private Application myApp;
private AppListener myAppListener;
private FocusCommand myFocusCommandOnAppActivation;
private ActionCallback myCallbackOnActivation;
private WeakReference<Component> myFocusedComponentOnDeactivation;
private WeakReference<Component> myLastFocusedProjectComponent;
/**
* invoked by reflection
*/
public ToolWindowManagerImpl(final Project project, WindowManagerEx windowManagerEx, Application app) {
myProject = project;
myWindowManager = windowManagerEx;
myListenerList = new EventListenerList();
myLayout = new DesktopLayout();
myLayout.copyFrom(windowManagerEx.getLayout());
myId2InternalDecorator = new HashMap<String, InternalDecorator>();
myId2FloatingDecorator = new HashMap<String, FloatingDecorator>();
myId2StripeButton = new HashMap<String, StripeButton>();
myId2FocusWatcher = new HashMap<String, FocusWatcher>();
myEditorComponentFocusWatcher = new EditorComponentFocusWatcher();
myToolWindowPropertyChangeListener = new MyToolWindowPropertyChangeListener();
myInternalDecoratorListener = new MyInternalDecoratorListener();
myUIManagerPropertyChangeListener = new MyUIManagerPropertyChangeListener();
myLafManagerListener = new MyLafManagerListener();
myEditorComponentActive = false;
myActiveStack = new ActiveStack();
mySideStack = new SideStack();
myFocusedComponentAlaram = new Alarm(Alarm.ThreadToUse.SWING_THREAD, project);
myForcedFocusRequestsAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, project);
myIdleAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, project);
myApp = app;
myAppListener = new AppListener();
myApp.addApplicationListener(myAppListener);
IdeEventQueue.getInstance().addDispatcher(new IdeEventQueue.EventDispatcher() {
public boolean dispatch(AWTEvent e) {
if (e instanceof FocusEvent) {
final FocusEvent fe = (FocusEvent)e;
final Component c = fe.getComponent();
final IdeFrameImpl frame = myWindowManager.getFrame(myProject);
if (c instanceof Window || c == null || frame == null) return false;
if (isProjectComponent(c)) {
if (fe.getID() == FocusEvent.FOCUS_GAINED) {
myLastFocusedProjectComponent = new WeakReference<Component>(c);
}
}
}
return false;
}
}, myProject);
}
private Component getLastFocusedProjectComponent() {
return myLastFocusedProjectComponent != null ? myLastFocusedProjectComponent.get() : null;
}
public Project getProject() {
return myProject;
}
public void initComponent() {
}
public void disposeComponent() {
myApp.removeApplicationListener(myAppListener);
}
public void projectOpened() {
UIManager.addPropertyChangeListener(myUIManagerPropertyChangeListener);
LafManager.getInstance().addLafManagerListener(myLafManagerListener);
myFrame = myWindowManager.allocateFrame(myProject);
LOG.assertTrue(myFrame != null);
final ArrayList<FinalizableCommand> commandsList = new ArrayList<FinalizableCommand>();
myToolWindowsPane = new ToolWindowsPane(myFrame, this);
((IdeRootPane)myFrame.getRootPane()).setToolWindowsPane(myToolWindowsPane);
appendUpdateToolWindowsPaneCmd(commandsList);
myFrame.setTitle(FrameTitleBuilder.getInstance().getProjectTitle(myProject));
final JComponent editorComponent = FileEditorManagerEx.getInstanceEx(myProject).getComponent();
myEditorComponentFocusWatcher.install(editorComponent);
appendSetEditorComponentCmd(editorComponent, commandsList);
if (myEditorComponentActive) {
activateEditorComponentImpl(commandsList, true);
}
execute(commandsList);
final DumbService.DumbModeListener dumbModeListener = new DumbService.DumbModeListener() {
private final Set<String> hiddenIds = new THashSet<String>();
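// Tool windows that are not dumb-aware are hidden and their stripe buttons disabled before entering dumb mode;
// the hidden ones are re-enabled and re-shown in exitDumbMode().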
public void enteredDumbMode() {
}
public void beforeEnteringDumbMode() {
for (final String id : getToolWindowIds()) {
if (!myDumbAwareIds.contains(id)) {
if (isToolWindowVisible(id)) {
hiddenIds.add(id);
hideToolWindow(id, true);
}
getStripeButton(id).setEnabled(false);
}
}
}
public void exitDumbMode() {
for (final String id : getToolWindowIds()) {
getStripeButton(id).setEnabled(true);
}
for (final String id : hiddenIds) {
showToolWindow(id);
}
hiddenIds.clear();
}
};
myProject.getMessageBus().connect().subscribe(DumbService.DUMB_MODE, dumbModeListener);
StartupManager.getInstance(myProject).registerPostStartupActivity(new DumbAwareRunnable() {
public void run() {
registerToolWindowsFromBeans();
if (DumbService.getInstance().isDumb()) {
dumbModeListener.beforeEnteringDumbMode();
}
}
});
}
private void registerToolWindowsFromBeans() {
ToolWindowEP[] beans = Extensions.getExtensions(ToolWindowEP.EP_NAME);
for (final ToolWindowEP bean : beans) {
final Condition condition = bean.getCondition();
if (condition != null && !condition.value(myProject)) {
continue;
}
ToolWindowAnchor toolWindowAnchor;
try {
toolWindowAnchor = ToolWindowAnchor.fromText(bean.anchor);
}
catch (Exception e) {
LOG.error(e);
continue;
}
JLabel label = new JLabel("Initializing toolwindow...");
final ToolWindow toolWindow = registerToolWindow(bean.id, label, toolWindowAnchor, myProject);
if (bean.icon != null) {
toolWindow.setIcon(IconLoader.getIcon(bean.icon));
}
toolWindow.setSplitMode(bean.secondary, null);
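// The real content is created lazily: once the placeholder label first becomes visible,
// the bean's ToolWindowFactory replaces it on the EDT.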
UiNotifyConnector.doWhenFirstShown(label, new Runnable() {
public void run() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
ToolWindowFactory factory = bean.getToolWindowFactory();
toolWindow.getContentManager().removeAllContents(false);
factory.createToolWindowContent(myProject, toolWindow);
}
});
}
});
}
}
public void projectClosed() {
UIManager.removePropertyChangeListener(myUIManagerPropertyChangeListener);
LafManager.getInstance().removeLafManagerListener(myLafManagerListener);
final ArrayList<FinalizableCommand> commandsList = new ArrayList<FinalizableCommand>();
final String[] ids = getToolWindowIds();
// Remove ToolWindowsPane
((IdeRootPane)myFrame.getRootPane()).setToolWindowsPane(null);
myWindowManager.releaseFrame(myFrame);
appendUpdateToolWindowsPaneCmd(commandsList);
// Hide all tool windows
for (final String id : ids) {
deactivateToolWindowImpl(id, true, commandsList);
}
// Remove editor component
final JComponent editorComponent = FileEditorManagerEx.getInstanceEx(myProject).getComponent();
myEditorComponentFocusWatcher.deinstall(editorComponent);
appendSetEditorComponentCmd(null, commandsList);
execute(commandsList);
}
public void addToolWindowManagerListener(final ToolWindowManagerListener l) {
myListenerList.add(ToolWindowManagerListener.class, l);
}
public void removeToolWindowManagerListener(final ToolWindowManagerListener l) {
myListenerList.remove(ToolWindowManagerListener.class, l);
}
/**
 * This is a helper method. It delegates its functionality to the WindowManager.
 * Before delegating, it fires a state-changed event.
*/
private void execute(final ArrayList<FinalizableCommand> commandList) {
fireStateChanged();
for (FinalizableCommand each : commandList) {
each.beforeExecute(this);
}
myWindowManager.getCommandProcessor().execute(commandList, myProject.getDisposed());
}
public void activateEditorComponent() {
activateEditorComponent(true);
}
private void activateEditorComponent(boolean forced) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: activateEditorComponent()");
}
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
activateEditorComponentImpl(commandList, forced);
execute(commandList);
}
private void activateEditorComponentImpl(final ArrayList<FinalizableCommand> commandList, final boolean forced) {
final String active = getActiveToolWindowId();
// Now we have to request focus into the most recently focused editor
appendRequestFocusInEditorComponentCmd(commandList, forced).doWhenDone(new Runnable() {
public void run() {
final ArrayList<FinalizableCommand> postExecute = new ArrayList<FinalizableCommand>();
if (LOG.isDebugEnabled()) {
LOG.debug("editor activated");
}
deactivateWindows(postExecute, null);
myActiveStack.clear();
myEditorComponentActive = true;
execute(postExecute);
}
}).doWhenRejected(new Runnable() {
public void run() {
if (forced) {
requestFocus(new FocusCommand() {
public ActionCallback run() {
final ArrayList<FinalizableCommand> cmds = new ArrayList<FinalizableCommand>();
final WindowInfoImpl toReactivate = getInfo(active);
final boolean reactivateLastActive = toReactivate != null && !isToHide(toReactivate);
deactivateWindows(cmds, reactivateLastActive ? active : null);
execute(cmds);
if (reactivateLastActive) {
activateToolWindow(active, false, true);
}
else {
if (active != null) {
myActiveStack.remove(active, false);
}
if (!myActiveStack.isEmpty()) {
activateToolWindow(myActiveStack.peek(), false, true);
}
}
return new ActionCallback.Done();
}
}, false);
}
}
});
}
private void deactivateWindows(final ArrayList<FinalizableCommand> postExecute, String idToIgnore) {
final WindowInfoImpl[] infos = myLayout.getInfos();
for (final WindowInfoImpl info : infos) {
final boolean shouldHide = isToHide(info);
if (idToIgnore != null && idToIgnore.equals(info.getId())) {
continue;
}
deactivateToolWindowImpl(info.getId(), shouldHide, postExecute);
}
}
private boolean isToHide(final WindowInfoImpl info) {
return (info.isAutoHide() || info.isSliding()) && !(info.isFloating() && hasModalChild(info));
}
/**
 * Helper method. It makes the window visible, activates it and requests focus into the tool window.
 * But it doesn't deactivate other tool windows. Use the <code>prepareForActivation</code> method to
 * deactivate other tool windows.
 *
 * @param dirtyMode if <code>true</code> then all UI operations are performed in "dirty" mode.
 * It means that the UI isn't validated and repainted after each add/remove operation.
* @see ToolWindowManagerImpl#prepareForActivation
*/
private void showAndActivate(final String id,
final boolean dirtyMode,
final ArrayList<FinalizableCommand> commandsList,
boolean autoFocusContents) {
if (!getToolWindow(id).isAvailable()) {
return;
}
// show activated
final WindowInfoImpl info = getInfo(id);
boolean toApplyInfo = false;
if (!info.isActive()) {
info.setActive(true);
toApplyInfo = true;
}
showToolWindowImpl(id, dirtyMode, commandsList);
// activate
if (toApplyInfo) {
appendApplyWindowInfoCmd(info, commandsList);
myActiveStack.push(id);
myEditorComponentActive = false;
}
if (autoFocusContents) {
appendRequestFocusInToolWindowCmd(id, commandsList, true);
}
}
void activateToolWindow(final String id, boolean forced, boolean autoFocusContents) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: activateToolWindow(" + id + ")");
}
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
if (DumbService.getInstance().isDumb() && !myDumbAwareIds.contains(id)) {
return;
}
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
activateToolWindowImpl(id, commandList, forced, autoFocusContents);
execute(commandList);
}
private void activateToolWindowImpl(final String id,
final ArrayList<FinalizableCommand> commandList,
boolean forced,
boolean autoFocusContents) {
if (!isUnforcedRequestAllowed() && !forced) return;
if (LOG.isDebugEnabled()) {
LOG.debug("enter: activateToolWindowImpl(" + id + ")");
}
if (!getToolWindow(id).isAvailable()) {
// Tool window can be "logically" active but not focused. For example,
// when the user switched to another application. So we just need to bring
// tool window's window to front.
final InternalDecorator decorator = getInternalDecorator(id);
if (!decorator.hasFocus() && autoFocusContents) {
appendRequestFocusInToolWindowCmd(id, commandList, forced);
}
return;
}
prepareForActivation(id, commandList);
showAndActivate(id, false, commandList, autoFocusContents);
}
/**
 * Checks whether the specified <code>id</code> defines an installed tool
 * window. If it does not, an <code>IllegalStateException</code> is thrown.
*
* @throws IllegalStateException if tool window isn't installed.
*/
private void checkId(final String id) {
if (!myLayout.isToolWindowRegistered(id)) {
throw new IllegalStateException("window with id=\"" + id + "\" isn't registered");
}
}
/**
* Helper method. It deactivates (and hides) window with specified <code>id</code>.
*
* @param id <code>id</code> of the tool window to be deactivated.
* @param shouldHide if <code>true</code> then also hides specified tool window.
*/
private void deactivateToolWindowImpl(final String id, final boolean shouldHide, final List<FinalizableCommand> commandsList) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: deactivateToolWindowImpl(" + id + "," + shouldHide + ")");
}
final WindowInfoImpl info = getInfo(id);
if (shouldHide && info.isVisible()) {
info.setVisible(false);
if (info.isFloating()) {
appendRemoveFloatingDecoratorCmd(info, commandsList);
}
else { // docked and sliding windows
appendRemoveDecoratorCmd(id, false, commandsList);
}
}
info.setActive(false);
appendApplyWindowInfoCmd(info, commandsList);
}
public String[] getToolWindowIds() {
ApplicationManager.getApplication().assertIsDispatchThread();
final WindowInfoImpl[] infos = myLayout.getInfos();
final String[] ids = ArrayUtil.newStringArray(infos.length);
for (int i = 0; i < infos.length; i++) {
ids[i] = infos[i].getId();
}
return ids;
}
public String getActiveToolWindowId() {
ApplicationManager.getApplication().assertIsDispatchThread();
return myLayout.getActiveId();
}
public String getLastActiveToolWindowId() {
return getLastActiveToolWindowId(null);
}
public String getLastActiveToolWindowId(Condition<JComponent> condition) {
ApplicationManager.getApplication().assertIsDispatchThread();
String lastActiveToolWindowId = null;
for (int i = 0; i < myActiveStack.getPersistentSize(); i++) {
final String id = myActiveStack.peekPersistent(i);
final ToolWindow toolWindow = getToolWindow(id);
LOG.assertTrue(toolWindow != null);
if (toolWindow.isAvailable()) {
if (condition == null || condition.value(toolWindow.getComponent())) {
lastActiveToolWindowId = id;
break;
}
}
}
return lastActiveToolWindowId;
}
/**
* @return floating decorator for the tool window with specified <code>ID</code>.
*/
private FloatingDecorator getFloatingDecorator(final String id) {
return myId2FloatingDecorator.get(id);
}
/**
* @return internal decorator for the tool window with specified <code>ID</code>.
*/
private InternalDecorator getInternalDecorator(final String id) {
return myId2InternalDecorator.get(id);
}
/**
* @return tool button for the window with specified <code>ID</code>.
*/
private StripeButton getStripeButton(final String id) {
return myId2StripeButton.get(id);
}
/**
* @return info for the tool window with specified <code>ID</code>.
*/
private WindowInfoImpl getInfo(final String id) {
return myLayout.getInfo(id, true);
}
public List<String> getIdsOn(final ToolWindowAnchor anchor) {
return myLayout.getVisibleIdsOn(anchor, this);
}
public ToolWindow getToolWindow(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
if (!myLayout.isToolWindowRegistered(id)) {
return null;
}
return getInternalDecorator(id).getToolWindow();
}
void showToolWindow(final String id) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: showToolWindow(" + id + ")");
}
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
showToolWindowImpl(id, false, commandList);
execute(commandList);
}
public void hideToolWindow(final String id, final boolean hideSide) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (!info.isVisible()) return;
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
final boolean wasActive = info.isActive();
// hide and deactivate
deactivateToolWindowImpl(id, true, commandList);
if (hideSide || info.isFloating()) {
while (!mySideStack.isEmpty(info.getAnchor())) {
mySideStack.pop(info.getAnchor());
}
final String[] all = getToolWindowIds();
for (String eachId : all) {
final WindowInfoImpl eachInfo = getInfo(eachId);
if (eachInfo.isVisible() && eachInfo.getAnchor() == info.getAnchor()) {
deactivateToolWindowImpl(eachId, true, commandList);
}
}
activateEditorComponentImpl(commandList, true);
}
else {
// first of all we have to find the tool window that was located at the same side and
// was hidden.
WindowInfoImpl info2 = null;
while (!mySideStack.isEmpty(info.getAnchor())) {
final WindowInfoImpl storedInfo = mySideStack.pop(info.getAnchor());
final WindowInfoImpl currentInfo = getInfo(storedInfo.getId());
LOG.assertTrue(currentInfo != null);
// SideStack contains copies of real WindowInfos. It means that
// these stored infos can be invalid. The following loop removes invalid WindowInfos.
if (storedInfo.getAnchor() == currentInfo.getAnchor() &&
storedInfo.getType() == currentInfo.getType() &&
storedInfo.isAutoHide() == currentInfo.isAutoHide()) {
info2 = storedInfo;
break;
}
}
if (info2 != null) {
showToolWindowImpl(info2.getId(), false, commandList);
}
// If we hide currently active tool window then we should activate the previous
// one which is located in the tool window stack.
// Activate another tool window if no active tool window exists and
// window stack is enabled.
myActiveStack.remove(id, false); // hidden window should be at the top of stack
if (wasActive) {
if (myActiveStack.isEmpty()) {
activateEditorComponentImpl(commandList, false);
}
else {
final String toBeActivatedId = myActiveStack.pop();
if (toBeActivatedId != null) {
activateToolWindowImpl(toBeActivatedId, commandList, false, true);
}
}
}
}
execute(commandList);
}
/**
* @param dirtyMode if <code>true</code> then all UI operations are performed in dirty mode.
*/
private void showToolWindowImpl(final String id, final boolean dirtyMode, final List<FinalizableCommand> commandsList) {
final WindowInfoImpl toBeShownInfo = getInfo(id);
if (toBeShownInfo.isVisible() || !getToolWindow(id).isAvailable()) {
return;
}
toBeShownInfo.setVisible(true);
final InternalDecorator decorator = getInternalDecorator(id);
if (toBeShownInfo.isFloating()) {
commandsList.add(new AddFloatingDecoratorCmd(decorator, toBeShownInfo));
}
else { // docked and sliding windows
// If there is a tool window on the same side then we have to hide it, i.e.
// clear a place for the tool window to be shown.
//
// We store the WindowInfo of the hidden tool window in the SideStack (if the tool window
// is docked and not an auto-hide one). Therefore it is possible to restore the
// hidden tool window when the currently shown tool window is closed.
final WindowInfoImpl[] infos = myLayout.getInfos();
for (final WindowInfoImpl info : infos) {
if (id.equals(info.getId())) {
continue;
}
if (info.isVisible() &&
info.getType() == toBeShownInfo.getType() &&
info.getAnchor() == toBeShownInfo.getAnchor() &&
info.isSplit() == toBeShownInfo.isSplit()) {
// hide and deactivate tool window
info.setVisible(false);
appendRemoveDecoratorCmd(info.getId(), false, commandsList);
if (info.isActive()) {
info.setActive(false);
}
appendApplyWindowInfoCmd(info, commandsList);
// store WindowInfo into the SideStack
if (info.isDocked() && !info.isAutoHide()) {
mySideStack.push(info);
}
}
}
appendAddDecoratorCmd(decorator, toBeShownInfo, dirtyMode, commandsList);
// Remove tool window from the SideStack.
mySideStack.remove(id);
}
appendApplyWindowInfoCmd(toBeShownInfo, commandsList);
}
public ToolWindow registerToolWindow(@NotNull final String id,
@NotNull final JComponent component,
@NotNull final ToolWindowAnchor anchor) {
return registerToolWindow(id, component, anchor, false, false, false);
}
public ToolWindow registerToolWindow(@NotNull final String id, final boolean canCloseContent, @NotNull final ToolWindowAnchor anchor) {
return registerToolWindow(id, null, anchor, false, canCloseContent, false);
}
public ToolWindow registerToolWindow(@NotNull final String id,
final boolean canCloseContent,
@NotNull final ToolWindowAnchor anchor,
final boolean sideTool) {
return registerToolWindow(id, null, anchor, sideTool, canCloseContent, false);
}
public ToolWindow registerToolWindow(@NotNull final String id, final boolean canCloseContent, @NotNull final ToolWindowAnchor anchor,
final Disposable parentDisposable, final boolean canWorkInDumbMode) {
return registerDisposable(id, parentDisposable, registerToolWindow(id, null, anchor, false, canCloseContent, canWorkInDumbMode));
}
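// Usage sketch (illustrative only; the id, panel and content-factory lookup are assumptions, not part of this class):
//   ToolWindow tw = registerToolWindow("MyToolWindow", true, ToolWindowAnchor.BOTTOM, parentDisposable, true);
//   tw.getContentManager().addContent(ContentFactory.SERVICE.getInstance().createContent(panel, "", false));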
private ToolWindow registerToolWindow(@NotNull final String id,
@Nullable final JComponent component,
@NotNull final ToolWindowAnchor anchor,
boolean sideTool,
boolean canCloseContent, final boolean canWorkInDumbMode) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: installToolWindow(" + id + "," + component + "," + anchor + "\")");
}
ApplicationManager.getApplication().assertIsDispatchThread();
if (myLayout.isToolWindowRegistered(id)) {
throw new IllegalArgumentException("window with id=\"" + id + "\" is already registered");
}
final WindowInfoImpl info = myLayout.register(id, anchor, sideTool);
final boolean wasActive = info.isActive();
final boolean wasVisible = info.isVisible();
info.setActive(false);
info.setVisible(false);
// Create decorator
final ToolWindowImpl toolWindow = new ToolWindowImpl(this, id, canCloseContent, component);
final InternalDecorator decorator = new InternalDecorator(myProject, info.copy(), toolWindow);
myId2InternalDecorator.put(id, decorator);
decorator.addInternalDecoratorListener(myInternalDecoratorListener);
toolWindow.addPropertyChangeListener(myToolWindowPropertyChangeListener);
myId2FocusWatcher.put(id, new ToolWindowFocusWatcher(toolWindow));
if (canWorkInDumbMode) {
myDumbAwareIds.add(id);
}
// Create and show tool button
final StripeButton button = new StripeButton(decorator, myToolWindowsPane);
myId2StripeButton.put(id, button);
final ArrayList<FinalizableCommand> commandsList = new ArrayList<FinalizableCommand>();
appendAddButtonCmd(button, info, commandsList);
// If the preloaded info is visible or active then we have to show/activate the installed
// tool window. This step only makes sense for windows which are not in auto-hide
// mode. But if the tool window was active and its mode doesn't allow activating it again
// (for example, the tool window is in auto-hide mode) then we just activate the editor component.
if (!info.isAutoHide() && (info.isDocked() || info.isFloating())) {
if (wasActive) {
activateToolWindowImpl(info.getId(), commandsList, true, true);
}
else if (wasVisible) {
showToolWindowImpl(info.getId(), false, commandsList);
}
}
else if (wasActive) { // tool window was active but it cannot be activated again
activateEditorComponentImpl(commandsList, true);
}
execute(commandsList);
fireToolWindowRegistered(id);
return toolWindow;
}
public ToolWindow registerToolWindow(@NotNull final String id,
@NotNull JComponent component,
@NotNull ToolWindowAnchor anchor,
Disposable parentDisposable) {
return registerDisposable(id, parentDisposable, registerToolWindow(id, component, anchor));
}
private ToolWindow registerDisposable(final String id, final Disposable parentDisposable, final ToolWindow window) {
Disposer.register(parentDisposable, new Disposable() {
public void dispose() {
unregisterToolWindow(id);
}
});
return window;
}
public void unregisterToolWindow(@NotNull final String id) {
if (LOG.isDebugEnabled()) {
LOG.debug("enter: unregisterToolWindow(" + id + ")");
}
ApplicationManager.getApplication().assertIsDispatchThread();
if (!myLayout.isToolWindowRegistered(id)) {
return;
}
final WindowInfoImpl info = getInfo(id);
final ToolWindowEx toolWindow = (ToolWindowEx)getToolWindow(id);
// Save recent appearance of tool window
myLayout.unregister(id);
// Remove decorator and tool button from the screen
final ArrayList<FinalizableCommand> commandsList = new ArrayList<FinalizableCommand>();
if (info.isVisible()) {
info.setVisible(false);
if (info.isFloating()) {
appendRemoveFloatingDecoratorCmd(info, commandsList);
}
else { // docked and sliding windows
appendRemoveDecoratorCmd(id, false, commandsList);
}
}
appendRemoveButtonCmd(id, commandsList);
appendApplyWindowInfoCmd(info, commandsList);
execute(commandsList);
// Remove all references on tool window and save its last properties
toolWindow.removePropertyChangeListener(myToolWindowPropertyChangeListener);
myActiveStack.remove(id, true);
mySideStack.remove(id);
// Destroy stripe button
final StripeButton button = getStripeButton(id);
button.dispose();
myId2StripeButton.remove(id);
//
myId2FocusWatcher.remove(id);
// Destroy decorator
final InternalDecorator decorator = getInternalDecorator(id);
decorator.dispose();
decorator.removeInternalDecoratorListener(myInternalDecoratorListener);
myId2InternalDecorator.remove(id);
}
public DesktopLayout getLayout() {
ApplicationManager.getApplication().assertIsDispatchThread();
return myLayout;
}
public void setLayoutToRestoreLater(DesktopLayout layout) {
myLayoutToRestoreLater = layout;
}
public DesktopLayout getLayoutToRestoreLater() {
return myLayoutToRestoreLater;
}
public void setLayout(final DesktopLayout layout) {
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
// hide tool windows that are invisible in the new layout
final WindowInfoImpl[] currentInfos = myLayout.getInfos();
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (currentInfo.isVisible() && !info.isVisible()) {
deactivateToolWindowImpl(currentInfo.getId(), true, commandList);
}
}
// change anchor of tool windows
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (currentInfo.getAnchor() != info.getAnchor() || currentInfo.getOrder() != info.getOrder()) {
setToolWindowAnchorImpl(currentInfo.getId(), info.getAnchor(), info.getOrder(), commandList);
}
}
// change types of tool windows
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (currentInfo.getType() != info.getType()) {
setToolWindowTypeImpl(currentInfo.getId(), info.getType(), commandList);
}
}
// change auto-hide state
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (currentInfo.isAutoHide() != info.isAutoHide()) {
setToolWindowAutoHideImpl(currentInfo.getId(), info.isAutoHide(), commandList);
}
}
// restore visibility
for (final WindowInfoImpl currentInfo : currentInfos) {
final WindowInfoImpl info = layout.getInfo(currentInfo.getId(), false);
if (info == null) {
continue;
}
if (info.isVisible()) {
showToolWindowImpl(currentInfo.getId(), false, commandList);
}
}
// if there is no active tool window and the editor is also inactive
// then activate the editor
if (!myEditorComponentActive && getActiveToolWindowId() == null) {
activateEditorComponentImpl(commandList, true);
}
execute(commandList);
}
public void invokeLater(final Runnable runnable) {
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
commandList.add(new InvokeLaterCmd(runnable, myWindowManager.getCommandProcessor()));
execute(commandList);
}
public IdeFocusManager getFocusManager() {
return IdeFocusManager.getInstance(myProject);
}
@Override
public void notifyByBalloon(@NotNull final String toolWindowId, @NotNull final MessageType type, @NotNull final String htmlBody) {
notifyByBalloon(toolWindowId, type, htmlBody, null, null);
}
public void notifyByBalloon(@NotNull final String toolWindowId,
final MessageType type,
@NotNull final String text,
@Nullable final Icon icon,
@Nullable HyperlinkListener listener) {
checkId(toolWindowId);
final Stripe stripe = myToolWindowsPane.getStripeFor(toolWindowId);
final ToolWindowImpl window = getInternalDecorator(toolWindowId).getToolWindow();
if (!window.isAvailable()) {
window.setPlaceholderMode(true);
stripe.updateState();
stripe.revalidate();
stripe.repaint();
}
final ToolWindowAnchor anchor = getInfo(toolWindowId).getAnchor();
final Ref<Balloon.Position> position = Ref.create(Balloon.Position.below);
if (ToolWindowAnchor.TOP == anchor) {
position.set(Balloon.Position.below);
}
else if (ToolWindowAnchor.BOTTOM == anchor) {
position.set(Balloon.Position.above);
}
else if (ToolWindowAnchor.LEFT == anchor) {
position.set(Balloon.Position.atRight);
}
else if (ToolWindowAnchor.RIGHT == anchor) {
position.set(Balloon.Position.atLeft);
}
Icon actualIcon = icon != null ? icon : type.getDefaultIcon();
final Balloon balloon =
JBPopupFactory.getInstance().createHtmlTextBalloonBuilder(text.replace("\n", "<br>"), actualIcon, type.getPopupBackground(), listener)
.createBalloon();
Disposer.register(balloon, new Disposable() {
public void dispose() {
window.setPlaceholderMode(false);
stripe.updateState();
stripe.revalidate();
stripe.repaint();
}
});
final StripeButton button = stripe.getButtonFor(toolWindowId);
if (button == null) return;
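// If the stripe button is showing, anchor the balloon to it; otherwise point the balloon
// at the matching edge of the tool windows pane.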
final Runnable show = new Runnable() {
public void run() {
if (button.isShowing()) {
final Point point = new Point(button.getBounds().width / 2, button.getHeight() / 2 - 2);
balloon.show(new RelativePoint(button, point), position.get());
}
else {
final Rectangle bounds = myToolWindowsPane.getBounds();
final Point target = UIUtil.getCenterPoint(bounds, new Dimension(1, 1));
if (ToolWindowAnchor.TOP == anchor) {
target.y = 0;
}
else if (ToolWindowAnchor.BOTTOM == anchor) {
target.y = bounds.height;
}
else if (ToolWindowAnchor.LEFT == anchor) {
target.x = 0;
}
else if (ToolWindowAnchor.RIGHT == anchor) {
target.x = bounds.width;
}
balloon.show(new RelativePoint(myToolWindowsPane, target), position.get());
}
}
};
if (!button.isValid()) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
show.run();
}
});
}
else {
show.run();
}
}
public boolean isEditorComponentActive() {
ApplicationManager.getApplication().assertIsDispatchThread();
return myEditorComponentActive;
}
ToolWindowAnchor getToolWindowAnchor(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).getAnchor();
}
void setToolWindowAnchor(final String id, final ToolWindowAnchor anchor) {
ApplicationManager.getApplication().assertIsDispatchThread();
setToolWindowAnchor(id, anchor, -1);
}
void setToolWindowAnchor(final String id, final ToolWindowAnchor anchor, final int order) {
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setToolWindowAnchorImpl(id, anchor, order, commandList);
execute(commandList);
}
private void setToolWindowAnchorImpl(final String id,
final ToolWindowAnchor anchor,
final int order,
final ArrayList<FinalizableCommand> commandsList) {
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (anchor == info.getAnchor() && order == info.getOrder()) {
return;
}
// if the tool window isn't visible, or only the order number changed, then just remove/add the stripe button
if (!info.isVisible() || anchor == info.getAnchor() || info.isFloating()) {
appendRemoveButtonCmd(id, commandsList);
myLayout.setAnchor(id, anchor, order);
// update infos for all windows. Actually we only have to update the infos affected by
// the setAnchor method
final WindowInfoImpl[] infos = myLayout.getInfos();
for (WindowInfoImpl info1 : infos) {
appendApplyWindowInfoCmd(info1, commandsList);
}
appendAddButtonCmd(getStripeButton(id), info, commandsList);
}
else { // for docked and sliding windows we have to move buttons and window's decorators
info.setVisible(false);
appendRemoveDecoratorCmd(id, false, commandsList);
appendRemoveButtonCmd(id, commandsList);
myLayout.setAnchor(id, anchor, order);
// update infos for all windows. Actually we only have to update the infos affected by
// the setAnchor method
final WindowInfoImpl[] infos = myLayout.getInfos();
for (WindowInfoImpl info1 : infos) {
appendApplyWindowInfoCmd(info1, commandsList);
}
appendAddButtonCmd(getStripeButton(id), info, commandsList);
showToolWindowImpl(id, false, commandsList);
if (info.isActive()) {
appendRequestFocusInToolWindowCmd(id, commandsList, true);
}
}
}
boolean isSplitMode(String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isSplit();
}
void setSideTool(String id, boolean isSide) {
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setSplitModeImpl(id, isSide, commandList);
execute(commandList);
}
void setSideToolAndAnchor(String id, ToolWindowAnchor anchor, int order, boolean isSide) {
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setToolWindowAnchor(id, anchor, order);
setSplitModeImpl(id, isSide, commandList);
execute(commandList);
}
private void setSplitModeImpl(final String id, final boolean isSplit, final ArrayList<FinalizableCommand> commandList) {
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (isSplit == info.isSplit()) {
return;
}
myLayout.setSplitMode(id, isSplit);
boolean wasActive = info.isActive();
if (wasActive) {
deactivateToolWindowImpl(id, true, commandList);
}
final WindowInfoImpl[] infos = myLayout.getInfos();
for (WindowInfoImpl info1 : infos) {
appendApplyWindowInfoCmd(info1, commandList);
}
if (wasActive) {
activateToolWindowImpl(id, commandList, true, true);
}
commandList.add(myToolWindowsPane.createUpdateButtonPositionCmd(id, myWindowManager.getCommandProcessor()));
}
ToolWindowType getToolWindowInternalType(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).getInternalType();
}
ToolWindowType getToolWindowType(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).getType();
}
private void fireToolWindowRegistered(final String id) {
final ToolWindowManagerListener[] listeners = myListenerList.getListeners(ToolWindowManagerListener.class);
for (ToolWindowManagerListener listener : listeners) {
listener.toolWindowRegistered(id);
}
}
private void fireStateChanged() {
final ToolWindowManagerListener[] listeners = myListenerList.getListeners(ToolWindowManagerListener.class);
for (ToolWindowManagerListener listener : listeners) {
listener.stateChanged();
}
}
boolean isToolWindowActive(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isActive();
}
boolean isToolWindowAutoHide(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isAutoHide();
}
public boolean isToolWindowFloating(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isFloating();
}
boolean isToolWindowVisible(final String id) {
ApplicationManager.getApplication().assertIsDispatchThread();
checkId(id);
return getInfo(id).isVisible();
}
void setToolWindowAutoHide(final String id, final boolean autoHide) {
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setToolWindowAutoHideImpl(id, autoHide, commandList);
execute(commandList);
}
private void setToolWindowAutoHideImpl(final String id, final boolean autoHide, final ArrayList<FinalizableCommand> commandsList) {
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (info.isAutoHide() == autoHide) {
return;
}
info.setAutoHide(autoHide);
appendApplyWindowInfoCmd(info, commandsList);
if (info.isVisible()) {
prepareForActivation(id, commandsList);
showAndActivate(id, false, commandsList, true);
}
}
void setToolWindowType(final String id, final ToolWindowType type) {
ApplicationManager.getApplication().assertIsDispatchThread();
final ArrayList<FinalizableCommand> commandList = new ArrayList<FinalizableCommand>();
setToolWindowTypeImpl(id, type, commandList);
execute(commandList);
}
private void setToolWindowTypeImpl(final String id, final ToolWindowType type, final ArrayList<FinalizableCommand> commandsList) {
checkId(id);
final WindowInfoImpl info = getInfo(id);
if (info.getType() == type) {
return;
}
if (info.isVisible()) {
final boolean dirtyMode = info.isDocked() || info.isSliding();
info.setVisible(false);
if (info.isFloating()) {
appendRemoveFloatingDecoratorCmd(info, commandsList);
}
else { // docked and sliding windows
appendRemoveDecoratorCmd(id, dirtyMode, commandsList);
}
info.setType(type);
appendApplyWindowInfoCmd(info, commandsList);
prepareForActivation(id, commandsList);
showAndActivate(id, dirtyMode, commandsList, true);
appendUpdateToolWindowsPaneCmd(commandsList);
}
else {
info.setType(type);
appendApplyWindowInfoCmd(info, commandsList);
}
}
private void appendApplyWindowInfoCmd(final WindowInfoImpl info, final List<FinalizableCommand> commandsList) {
final StripeButton button = getStripeButton(info.getId());
final InternalDecorator decorator = getInternalDecorator(info.getId());
commandsList.add(new ApplyWindowInfoCmd(info, button, decorator, myWindowManager.getCommandProcessor()));
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createAddDecoratorCmd
*/
private void appendAddDecoratorCmd(final InternalDecorator decorator,
final WindowInfoImpl info,
final boolean dirtyMode,
final List<FinalizableCommand> commandsList) {
final CommandProcessor commandProcessor = myWindowManager.getCommandProcessor();
final FinalizableCommand command = myToolWindowsPane.createAddDecoratorCmd(decorator, info, dirtyMode, commandProcessor);
commandsList.add(command);
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createRemoveDecoratorCmd
*/
private void appendRemoveDecoratorCmd(final String id, final boolean dirtyMode, final List<FinalizableCommand> commandsList) {
final FinalizableCommand command = myToolWindowsPane.createRemoveDecoratorCmd(id, dirtyMode, myWindowManager.getCommandProcessor());
commandsList.add(command);
}
private void appendRemoveFloatingDecoratorCmd(final WindowInfoImpl info, final List<FinalizableCommand> commandsList) {
final RemoveFloatingDecoratorCmd command = new RemoveFloatingDecoratorCmd(info);
commandsList.add(command);
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createAddButtonCmd
*/
private void appendAddButtonCmd(final StripeButton button, final WindowInfoImpl info, final List<FinalizableCommand> commandsList) {
final Comparator comparator = myLayout.comparator(info.getAnchor());
final CommandProcessor commandProcessor = myWindowManager.getCommandProcessor();
final FinalizableCommand command = myToolWindowsPane.createAddButtonCmd(button, info, comparator, commandProcessor);
commandsList.add(command);
}
/**
 * @see com.intellij.openapi.wm.impl.ToolWindowsPane#createRemoveButtonCmd
*/
private void appendRemoveButtonCmd(final String id, final List<FinalizableCommand> commandsList) {
final FinalizableCommand command = myToolWindowsPane.createRemoveButtonCmd(id, myWindowManager.getCommandProcessor());
commandsList.add(command);
}
private ActionCallback appendRequestFocusInEditorComponentCmd(final ArrayList<FinalizableCommand> commandList, boolean forced) {
if (myProject.isDisposed()) return new ActionCallback.Done();
final CommandProcessor commandProcessor = myWindowManager.getCommandProcessor();
final RequestFocusInEditorComponentCmd command =
new RequestFocusInEditorComponentCmd(FileEditorManagerEx.getInstanceEx(myProject), commandProcessor, forced);
commandList.add(command);
return command.getDoneCallback();
}
private void appendRequestFocusInToolWindowCmd(final String id, final ArrayList<FinalizableCommand> commandList, boolean forced) {
final ToolWindowImpl toolWindow = (ToolWindowImpl)getToolWindow(id);
final FocusWatcher focusWatcher = myId2FocusWatcher.get(id);
commandList.add(new RequestFocusInToolWindowCmd(toolWindow, focusWatcher, myWindowManager.getCommandProcessor(), forced));
}
/**
* @see com.intellij.openapi.wm.impl.ToolWindowsPane#createSetEditorComponentCmd
*/
private void appendSetEditorComponentCmd(final JComponent component, final List<FinalizableCommand> commandsList) {
final CommandProcessor commandProcessor = myWindowManager.getCommandProcessor();
final FinalizableCommand command = myToolWindowsPane.createSetEditorComponentCmd(component, commandProcessor);
commandsList.add(command);
}
private void appendUpdateToolWindowsPaneCmd(final List<FinalizableCommand> commandsList) {
final JRootPane rootPane = myFrame.getRootPane();
final FinalizableCommand command = new UpdateRootPaneCmd(rootPane, myWindowManager.getCommandProcessor());
commandsList.add(command);
}
/**
 * @return <code>true</code> if the tool window with the specified <code>id</code>
 * is floating and has a modal child dialog showing. Such windows should not be closed
 * when auto-hide windows are being hidden.
*/
private boolean hasModalChild(final WindowInfoImpl info) {
if (!info.isVisible() || !info.isFloating()) {
return false;
}
final FloatingDecorator decorator = getFloatingDecorator(info.getId());
LOG.assertTrue(decorator != null);
return isModalOrHasModalChild(decorator);
}
private static boolean isModalOrHasModalChild(final Window window) {
if (window instanceof Dialog) {
final Dialog dialog = (Dialog)window;
if (dialog.isModal() && dialog.isShowing()) {
return true;
}
final Window[] ownedWindows = dialog.getOwnedWindows();
for (int i = ownedWindows.length - 1; i >= 0; i--) {
if (isModalOrHasModalChild(ownedWindows[i])) {
return true;
}
}
}
return false;
}
/**
 * Helper method. It deactivates all tool windows except the tool window
 * which should be activated.
*/
private void prepareForActivation(final String id, final List<FinalizableCommand> commandList) {
final WindowInfoImpl toBeActivatedInfo = getInfo(id);
final WindowInfoImpl[] infos = myLayout.getInfos();
for (final WindowInfoImpl info : infos) {
if (id.equals(info.getId())) {
continue;
}
if (toBeActivatedInfo.isDocked() || toBeActivatedInfo.isSliding()) {
deactivateToolWindowImpl(info.getId(), info.isAutoHide() || info.isSliding(), commandList);
}
else { // floating window is being activated
deactivateToolWindowImpl(info.getId(), info.isAutoHide() && info.isFloating() && !hasModalChild(info), commandList);
}
}
}
public void clearSideStack() {
mySideStack.clear();
}
public void readExternal(final Element element) {
for (final Object o : element.getChildren()) {
final Element e = (Element)o;
if (EDITOR_ELEMENT.equals(e.getName())) {
myEditorComponentActive = Boolean.valueOf(e.getAttributeValue(ACTIVE_ATTR_VALUE)).booleanValue();
}
else if (DesktopLayout.TAG.equals(e.getName())) { // read layout of tool windows
myLayout.readExternal(e);
}
}
}
public void writeExternal(final Element element) {
if (myFrame == null) {
// do nothing if the project was not opened
return;
}
final String[] ids = getToolWindowIds();
// Update size of all open floating windows. See SCR #18439
for (final String id : ids) {
final WindowInfoImpl info = getInfo(id);
if (info.isVisible()) {
final InternalDecorator decorator = getInternalDecorator(id);
LOG.assertTrue(decorator != null);
decorator.fireResized();
}
}
// Save frame's bounds
final Rectangle frameBounds = myFrame.getBounds();
final Element frameElement = new Element(FRAME_ELEMENT);
element.addContent(frameElement);
frameElement.setAttribute(X_ATTR, Integer.toString(frameBounds.x));
frameElement.setAttribute(Y_ATTR, Integer.toString(frameBounds.y));
frameElement.setAttribute(WIDTH_ATTR, Integer.toString(frameBounds.width));
frameElement.setAttribute(HEIGHT_ATTR, Integer.toString(frameBounds.height));
frameElement.setAttribute(EXTENDED_STATE_ATTR, Integer.toString(myFrame.getExtendedState()));
// Save whether editor is active or not
final Element editorElement = new Element(EDITOR_ELEMENT);
editorElement.setAttribute(ACTIVE_ATTR_VALUE, myEditorComponentActive ? Boolean.TRUE.toString() : Boolean.FALSE.toString());
element.addContent(editorElement);
// Save layout of tool windows
final Element layoutElement = new Element(DesktopLayout.TAG);
element.addContent(layoutElement);
myLayout.writeExternal(layoutElement);
}
public void setDefaultState(@NotNull final ToolWindowImpl toolWindow,
@Nullable final ToolWindowAnchor anchor,
@Nullable final ToolWindowType type,
@Nullable final Rectangle floatingBounds) {
final WindowInfoImpl info = getInfo(toolWindow.getId());
if (info.wasRead()) return;
if (floatingBounds != null) {
info.setFloatingBounds(floatingBounds);
}
if (anchor != null) {
toolWindow.setAnchor(anchor, null);
}
if (type != null) {
toolWindow.setType(type, null);
}
}
public void doWhenFocusSettlesDown(@NotNull final Runnable runnable) {
myIdleRequests.add(runnable);
if (myIdleAlarm.getActiveRequestCount() == 0) {
restartIdleAlarm();
}
}
private void restartIdleAlarm() {
myIdleAlarm.cancelAllRequests();
myIdleAlarm.addRequest(myFlushRunnable, 20);
}
private void flushIdleRequests() {
final Runnable[] all = myIdleRequests.toArray(new Runnable[myIdleRequests.size()]);
myIdleRequests.clear();
for (Runnable each : all) {
each.run();
}
}
public boolean isFocusTranferInProgress() {
return myRequestFocusCmd != null;
}
/**
* This command creates and shows <code>FloatingDecorator</code>.
*/
private final class AddFloatingDecoratorCmd extends FinalizableCommand {
private final FloatingDecorator myFloatingDecorator;
/**
 * Creates a floating decorator for the specified internal decorator.
*/
public AddFloatingDecoratorCmd(final InternalDecorator decorator, final WindowInfoImpl info) {
super(myWindowManager.getCommandProcessor());
myFloatingDecorator = new FloatingDecorator(myFrame, info.copy(), decorator);
myId2FloatingDecorator.put(info.getId(), myFloatingDecorator);
final Rectangle bounds = info.getFloatingBounds();
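// Reuse the persisted floating bounds only if they are non-empty and still start within the screen;
// otherwise size the decorator and center it relative to the main frame.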
if (bounds != null &&
bounds.width > 0 &&
bounds.height > 0 &&
myWindowManager.isInsideScreenBounds(bounds.x, bounds.y, bounds.width)) {
myFloatingDecorator.setBounds(bounds);
}
else { // place new frame at the center of main frame if there are no floating bounds
Dimension size = decorator.getSize();
if (size.width == 0 || size.height == 0) {
size = decorator.getPreferredSize();
}
myFloatingDecorator.setSize(size);
myFloatingDecorator.setLocationRelativeTo(myFrame);
}
}
public void run() {
try {
myFloatingDecorator.show();
}
finally {
finish();
}
}
}
/**
* This command hides and destroys floating decorator for tool window
* with specified <code>ID</code>.
*/
private final class RemoveFloatingDecoratorCmd extends FinalizableCommand {
private final FloatingDecorator myFloatingDecorator;
public RemoveFloatingDecoratorCmd(final WindowInfoImpl info) {
super(myWindowManager.getCommandProcessor());
myFloatingDecorator = getFloatingDecorator(info.getId());
myId2FloatingDecorator.remove(info.getId());
info.setFloatingBounds(myFloatingDecorator.getBounds());
}
public void run() {
try {
if (Patches.SPECIAL_WINPUT_METHOD_PROCESSING) {
myFloatingDecorator.remove(myFloatingDecorator.getRootPane());
}
myFloatingDecorator.dispose();
}
finally {
finish();
}
}
@Nullable
public Condition getExpireCondition() {
return Condition.FALSE;
}
}
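// Watches focus changes around the editor component; when the editor is not yet active and focus settles on the
// new owner, activates the editor component (which deactivates tool windows).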
private final class EditorComponentFocusWatcher extends FocusWatcher {
protected void focusedComponentChanged(final Component component, final AWTEvent cause) {
if (myWindowManager.getCommandProcessor().getCommandCount() > 0 || component == null) {
return;
}
// Sometimes a focus-gained event arrives while the editor is already active, for example when the
// user switches between menus or closes a dialog. In that case we simply ignore the event,
// i.e. we neither deactivate tool windows nor request focus in the editor.
if (myEditorComponentActive) {
return;
}
final KeyboardFocusManager mgr = KeyboardFocusManager.getCurrentKeyboardFocusManager();
final Component owner = mgr.getFocusOwner();
IdeFocusManager.getInstance(myProject).doWhenFocusSettlesDown(new Runnable() {
public void run() {
if (mgr.getFocusOwner() == owner) {
activateEditorComponent(false);
}
}
});
}
}
/**
* Notifies the window manager about focus traversal in a tool window.
*/
private final class ToolWindowFocusWatcher extends FocusWatcher {
private final String myId;
public ToolWindowFocusWatcher(final ToolWindowImpl toolWindow) {
myId = toolWindow.getId();
install(toolWindow.getComponent());
}
protected boolean isFocusedComponentChangeValid(final Component comp, final AWTEvent cause) {
return myWindowManager.getCommandProcessor().getCommandCount() == 0 && comp != null;
}
protected void focusedComponentChanged(final Component component, final AWTEvent cause) {
if (myWindowManager.getCommandProcessor().getCommandCount() > 0 || component == null) {
return;
}
final WindowInfoImpl info = getInfo(myId);
myFocusedComponentAlaram.cancelAllRequests();
if (!info.isActive()) {
myFocusedComponentAlaram.addRequest(new Runnable() {
public void run() {
if (!myLayout.isToolWindowRegistered(myId)) return;
activateToolWindow(myId, false, false);
}
}, 100);
}
}
}
/**
* Spies on IdeToolWindow properties and applies them to the window
* state.
*/
private final class MyToolWindowPropertyChangeListener implements PropertyChangeListener {
public void propertyChange(final PropertyChangeEvent e) {
final ToolWindowImpl toolWindow = (ToolWindowImpl)e.getSource();
if (ToolWindowEx.PROP_AVAILABLE.equals(e.getPropertyName())) {
final WindowInfoImpl info = getInfo(toolWindow.getId());
if (!toolWindow.isAvailable() && info.isVisible()) {
hideToolWindow(toolWindow.getId(), false);
}
}
}
}
/**
* Translates events from InternalDecorator into ToolWindowManager method invocations.
*/
private final class MyInternalDecoratorListener implements InternalDecoratorListener {
public void anchorChanged(final InternalDecorator source, final ToolWindowAnchor anchor) {
setToolWindowAnchor(source.getToolWindow().getId(), anchor);
}
public void autoHideChanged(final InternalDecorator source, final boolean autoHide) {
setToolWindowAutoHide(source.getToolWindow().getId(), autoHide);
}
public void hidden(final InternalDecorator source) {
hideToolWindow(source.getToolWindow().getId(), false);
}
public void hiddenSide(final InternalDecorator source) {
hideToolWindow(source.getToolWindow().getId(), true);
}
/**
* Handles an event from the decorator and modifies the weight or floating bounds of the
* tool window, depending on the decoration type.
*/
public void resized(final InternalDecorator source) {
final WindowInfoImpl info = getInfo(source.getToolWindow().getId());
if (info.isFloating()) {
final Window owner = SwingUtilities.getWindowAncestor(source);
if (owner != null) {
info.setFloatingBounds(owner.getBounds());
}
}
else { // docked and sliding windows
if (ToolWindowAnchor.TOP == info.getAnchor() || ToolWindowAnchor.BOTTOM == info.getAnchor()) {
info.setWeight((float)source.getHeight() / (float)myToolWindowsPane.getMyLayeredPane().getHeight());
float newSideWeight = (float)source.getWidth() / (float)myToolWindowsPane.getMyLayeredPane().getWidth();
if (newSideWeight < 1.0f) {
info.setSideWeight(newSideWeight);
}
}
else {
info.setWeight((float)source.getWidth() / (float)myToolWindowsPane.getMyLayeredPane().getWidth());
float newSideWeight = (float)source.getHeight() / (float)myToolWindowsPane.getMyLayeredPane().getHeight();
if (newSideWeight < 1.0f) {
info.setSideWeight(newSideWeight);
}
}
}
}
public void activated(final InternalDecorator source) {
activateToolWindow(source.getToolWindow().getId(), true, true);
}
public void typeChanged(final InternalDecorator source, final ToolWindowType type) {
setToolWindowType(source.getToolWindow().getId(), type);
}
public void sideStatusChanged(final InternalDecorator source, final boolean isSideTool) {
setSideTool(source.getToolWindow().getId(), isSideTool);
}
}
private void updateComponentTreeUI() {
ApplicationManager.getApplication().assertIsDispatchThread();
final WindowInfoImpl[] infos = myLayout.getInfos();
for (final WindowInfoImpl info : infos) {
if (info.isVisible()) { // skip visible tool windows (optimization)
continue;
}
SwingUtilities.updateComponentTreeUI(getInternalDecorator(info.getId()));
}
}
private final class MyUIManagerPropertyChangeListener implements PropertyChangeListener {
public void propertyChange(final PropertyChangeEvent e) {
updateComponentTreeUI();
}
}
private final class MyLafManagerListener implements LafManagerListener {
public void lookAndFeelChanged(final LafManager source) {
updateComponentTreeUI();
}
}
public WindowManagerEx getWindowManager() {
return myWindowManager;
}
@NotNull
public String getComponentName() {
return "ToolWindowManager";
}
public ToolWindowsPane getToolWindowsPane() {
return myToolWindowsPane;
}
public ActionCallback requestFocus(final Component c, final boolean forced) {
return requestFocus(new FocusCommand.ByComponent(c), forced);
}
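// Forced requests run immediately; unforced ones are posted through LaterInvocator so they can still be rejected
// if a forced request arrives in the meantime.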
public ActionCallback requestFocus(final FocusCommand command, final boolean forced) {
final ActionCallback result = new ActionCallback();
if (!forced) {
LaterInvocator.invokeLater(new Runnable() {
public void run() {
_requestFocus(command, forced, result);
}
});
}
else {
_requestFocus(command, forced, result);
}
return result;
}
private void _requestFocus(final FocusCommand command, final boolean forced, final ActionCallback result) {
if (checkForRejectOrByPass(command, forced, result)) return;
restartIdleAlarm();
myRequestFocusCmd = command;
if (forced) {
myForcedFocusRequestsAlarm.cancelAllRequests();
setLastEffectiveForcedRequest(command);
}
fixStickingDialogs();
LaterInvocator.invokeLater(new Runnable() {
public void run() {
if (checkForRejectOrByPass(command, forced, result)) return;
if (myRequestFocusCmd == command) {
myRequestFocusCmd = null;
command.run().doWhenDone(new Runnable() {
public void run() {
LaterInvocator.invokeLater(new Runnable() {
public void run() {
result.setDone();
}
});
}
}).doWhenRejected(new Runnable() {
public void run() {
result.setRejected();
}
});
restartIdleAlarm();
if (forced) {
myForcedFocusRequestsAlarm.addRequest(new Runnable() {
public void run() {
setLastEffectiveForcedRequest(null);
}
}, 250);
}
}
else {
rejectCommand(command, result);
}
}
});
}
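// Workaround for Patches.STICKY_DIALOGS: if the "active" window is no longer showing, reflectively mark its
// parent window as the globally active one via KeyboardFocusManager.setGlobalActiveWindow.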
private void fixStickingDialogs() {
if (!Patches.STICKY_DIALOGS) return;
final KeyboardFocusManager mgr = KeyboardFocusManager.getCurrentKeyboardFocusManager();
final Window wnd = mgr.getActiveWindow();
if (wnd != null && !wnd.isShowing() && wnd.getParent() instanceof Window) {
final Container parent = wnd.getParent();
final Method setActive = ReflectionUtil.findMethod(KeyboardFocusManager.class.getDeclaredMethods(), "setGlobalActiveWindow", Window.class);
if (setActive != null) {
try {
setActive.setAccessible(true);
setActive.invoke(mgr, (Window)parent);
}
catch (Exception e) {
LOG.info(e);
}
}
}
}
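// Decides whether the focus command can proceed. Returns true when it has already been resolved here:
// expired or dominated commands are rejected, duplicate unforced requests are marked done, and commands that
// cannot run while the application is inactive are parked until activation.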
private boolean checkForRejectOrByPass(final FocusCommand cmd, final boolean forced, final ActionCallback result) {
if (cmd.isExpired()) {
rejectCommand(cmd, result);
return true;
}
final FocusCommand lastRequest = getLastEffectiveForcedRequest();
if (!forced && !isUnforcedRequestAllowed()) {
if (cmd.equals(lastRequest)) {
result.setDone();
}
else {
rejectCommand(cmd, result);
}
return true;
}
if (lastRequest != null && lastRequest.dominatesOver(cmd)) {
rejectCommand(cmd, result);
return true;
}
if (!myApp.isActive() && !canExecuteOnInactiveApplication(cmd)) {
if (myCallbackOnActivation != null) {
myCallbackOnActivation.setRejected();
}
myFocusCommandOnAppActivation = cmd;
myCallbackOnActivation = result;
return true;
}
return false;
}
private void rejectCommand(FocusCommand cmd, ActionCallback callback) {
if (myRequestFocusCmd == cmd) {
resetCurrentCommand();
}
callback.setRejected();
}
private void resetCurrentCommand() {
myRequestFocusCmd = null;
}
private boolean canExecuteOnInactiveApplication(FocusCommand cmd) {
return !Patches.REQUEST_FOCUS_MAY_ACTIVATE_APP || cmd.canExecuteOnInactiveApp();
}
private void setLastEffectiveForcedRequest(FocusCommand command) {
myLastForcedRequest = new WeakReference<FocusCommand>(command);
}
@Nullable
private FocusCommand getLastEffectiveForcedRequest() {
if (myLastForcedRequest == null) return null;
final FocusCommand request = myLastForcedRequest.get();
return request != null && !request.isExpired() ? request : null;
}
private boolean isUnforcedRequestAllowed() {
return getLastEffectiveForcedRequest() == null;
}
private boolean isProjectComponent(Component c) {
final Component frame = UIUtil.findUltimateParent(c);
if (frame instanceof IdeFrame) {
return frame == myWindowManager.getFrame(myProject);
} else {
return false;
}
}
private class AppListener extends ApplicationAdapter {
@Override
public void applicationDeactivated(IdeFrame ideFrame) {
Component c = getLastFocusedProjectComponent();
if (c == null) {
final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
if (isProjectComponent(owner)) {
c = owner;
}
}
myFocusedComponentOnDeactivation = c != null ? new WeakReference<Component>(c) : null;
}
@Override
public void applicationActivated(IdeFrame ideFrame) {
final FocusCommand cmd = myFocusCommandOnAppActivation;
ActionCallback callback = myCallbackOnActivation;
myFocusCommandOnAppActivation = null;
myCallbackOnActivation = null;
if (cmd != null && !cmd.isExpired()) {
requestFocus(cmd, true).notifyWhenDone(callback);
} else {
final KeyboardFocusManager mgr = KeyboardFocusManager.getCurrentKeyboardFocusManager();
if (ideFrame == myWindowManager.getFrame(myProject)) {
final Component owner = mgr.getFocusOwner();
Component old = myFocusedComponentOnDeactivation != null ? myFocusedComponentOnDeactivation.get() : null;
if (old == null || !old.isShowing()) {
old = IdeFocusTraversalPolicy.getPreferredFocusedComponent(((IdeFrameImpl)ideFrame).getRootPane());
}
if (owner == null && old != null && old.isShowing()) {
requestFocus(old, false);
}
myFocusedComponentOnDeactivation = null;
}
}
}
}
}
| focus fix: approved focus transfer should not depend on changed modality state
| platform-impl/src/com/intellij/openapi/wm/impl/ToolWindowManagerImpl.java | focus fix: approved focus transfer should not depend on changed modality state |
|
Java | apache-2.0 | ee3e7a48fef01b56ad4ec5101d651fa353f21ef2 | 0 | smartnews/presto,smartnews/presto,smartnews/presto,smartnews/presto,smartnews/presto | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.testing;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.graph.Traverser;
import io.airlift.units.Duration;
import io.trino.Session;
import io.trino.client.StageStats;
import io.trino.client.StatementStats;
import io.trino.execution.FailureInjector.InjectedFailureType;
import io.trino.operator.OperatorStats;
import io.trino.operator.RetryPolicy;
import io.trino.server.DynamicFilterService.DynamicFilterDomainStats;
import io.trino.server.DynamicFilterService.DynamicFiltersStats;
import io.trino.spi.ErrorType;
import io.trino.spi.QueryId;
import io.trino.tpch.TpchTable;
import org.assertj.core.api.AbstractThrowableAssert;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Streams.stream;
import static io.trino.SystemSessionProperties.ENABLE_DYNAMIC_FILTERING;
import static io.trino.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE;
import static io.trino.SystemSessionProperties.JOIN_REORDERING_STRATEGY;
import static io.trino.execution.FailureInjector.FAILURE_INJECTION_MESSAGE;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_FAILURE;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_GET_RESULTS_REQUEST_FAILURE;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_GET_RESULTS_REQUEST_TIMEOUT;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_MANAGEMENT_REQUEST_FAILURE;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_MANAGEMENT_REQUEST_TIMEOUT;
import static io.trino.spi.predicate.Domain.singleValue;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.sql.planner.OptimizerConfig.JoinDistributionType.PARTITIONED;
import static io.trino.sql.planner.OptimizerConfig.JoinReorderingStrategy.NONE;
import static io.trino.testing.QueryAssertions.assertEqualsIgnoreOrder;
import static io.trino.testing.sql.TestTable.randomTableSuffix;
import static io.trino.tpch.TpchTable.CUSTOMER;
import static io.trino.tpch.TpchTable.NATION;
import static io.trino.tpch.TpchTable.ORDERS;
import static io.trino.tpch.TpchTable.SUPPLIER;
import static java.lang.Integer.parseInt;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.testng.Assert.assertEquals;
public abstract class BaseFailureRecoveryTest
extends AbstractTestQueryFramework
{
private static final String PARTITIONED_LINEITEM = "partitioned_lineitem";
protected static final int INVOCATION_COUNT = 1;
private static final Duration MAX_ERROR_DURATION = new Duration(5, SECONDS);
private static final Duration REQUEST_TIMEOUT = new Duration(5, SECONDS);
private final RetryPolicy retryPolicy;
protected BaseFailureRecoveryTest(RetryPolicy retryPolicy)
{
this.retryPolicy = requireNonNull(retryPolicy, "retryPolicy is null");
}
@Override
protected final QueryRunner createQueryRunner()
throws Exception
{
return createQueryRunner(
ImmutableList.of(NATION, ORDERS, CUSTOMER, SUPPLIER),
ImmutableMap.<String, String>builder()
.put("query.remote-task.max-error-duration", MAX_ERROR_DURATION.toString())
.put("exchange.max-error-duration", MAX_ERROR_DURATION.toString())
.put("retry-policy", retryPolicy.toString())
.put("retry-initial-delay", "0s")
.put("query-retry-attempts", "1")
.put("task-retry-attempts-overall", "1")
.put("failure-injection.request-timeout", new Duration(REQUEST_TIMEOUT.toMillis() * 2, MILLISECONDS).toString())
// making http timeouts shorter so tests which simulate communication timeouts finish in a reasonable amount of time
.put("exchange.http-client.idle-timeout", REQUEST_TIMEOUT.toString())
.put("query.hash-partition-count", "5")
// to trigger spilling
.put("exchange.deduplication-buffer-size", "1kB")
.put("fault-tolerant-execution-task-memory", "1GB")
.buildOrThrow(),
ImmutableMap.<String, String>builder()
// making http timeouts shorter so tests which simulate communication timeouts finish in a reasonable amount of time
.put("scheduler.http-client.idle-timeout", REQUEST_TIMEOUT.toString())
.buildOrThrow());
}
protected abstract QueryRunner createQueryRunner(
List<TpchTable<?>> requiredTpchTables,
Map<String, String> configProperties,
Map<String, String> coordinatorProperties)
throws Exception;
@BeforeClass
public void initTables()
throws Exception
{
// setup partitioned fact table for dynamic partition pruning
createPartitionedLineitemTable(PARTITIONED_LINEITEM, ImmutableList.of("orderkey", "partkey", "suppkey"), "suppkey");
}
protected abstract void createPartitionedLineitemTable(String tableName, List<String> columns, String partitionColumn);
protected abstract boolean areWriteRetriesSupported();
@Test(invocationCount = INVOCATION_COUNT)
public void testSimpleSelect()
{
testSelect("SELECT * FROM nation");
}
@Test(invocationCount = INVOCATION_COUNT)
public void testAggregation()
{
testSelect("SELECT orderStatus, count(*) FROM orders GROUP BY orderStatus");
}
@Test(invocationCount = INVOCATION_COUNT)
public void testJoinDynamicFilteringDisabled()
{
@Language("SQL") String selectQuery = "SELECT * FROM partitioned_lineitem JOIN supplier ON partitioned_lineitem.suppkey = supplier.suppkey " +
"AND supplier.name = 'Supplier#000000001'";
testSelect(selectQuery, Optional.of(enableDynamicFiltering(false)));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testJoinDynamicFilteringEnabled()
{
@Language("SQL") String selectQuery = "SELECT * FROM partitioned_lineitem JOIN supplier ON partitioned_lineitem.suppkey = supplier.suppkey " +
"AND supplier.name = 'Supplier#000000001'";
testSelect(
selectQuery,
Optional.of(enableDynamicFiltering(true)),
queryId -> {
DynamicFiltersStats dynamicFiltersStats = getDynamicFilteringStats(queryId);
assertThat(dynamicFiltersStats.getLazyDynamicFilters()).isEqualTo(1);
DynamicFilterDomainStats domainStats = getOnlyElement(dynamicFiltersStats.getDynamicFilterDomainStats());
assertThat(domainStats.getSimplifiedDomain())
.isEqualTo(singleValue(BIGINT, 1L).toString(getSession().toConnectorSession()));
OperatorStats probeStats = searchScanFilterAndProjectOperatorStats(queryId, getQualifiedTableName(PARTITIONED_LINEITEM));
// Currently, stats from all attempts are combined.
// Asserting on multiple of 615L as well in case the probe scan was completed twice
assertThat(probeStats.getInputPositions()).isIn(615L, 1230L);
});
}
protected void testSelect(String query)
{
testSelect(query, Optional.empty());
}
protected void testSelect(String query, Optional<Session> session)
{
testSelect(query, session, queryId -> {});
}
protected void testSelect(String query, Optional<Session> session, Consumer<QueryId> queryAssertion)
{
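// Exercise task-management and get-results failures and timeouts, plus injected task failures, at different
// stages: each case must fail without retries and finish successfully once retries are enabled.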
assertThatQuery(query)
.withSession(session)
.experiencing(TASK_MANAGEMENT_REQUEST_FAILURE)
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Error 500 Internal Server Error|Error closing remote buffer, expected 204 got 500"))
.finishesSuccessfully(queryAssertion);
assertThatQuery(query)
.withSession(session)
.experiencing(TASK_GET_RESULTS_REQUEST_FAILURE)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Error 500 Internal Server Error|Error closing remote buffer, expected 204 got 500"))
.finishesSuccessfully(queryAssertion);
assertThatQuery(query)
.withSession(session)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully(queryAssertion);
assertThatQuery(query)
.withSession(session)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.EXTERNAL))
.at(intermediateDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully(queryAssertion);
assertThatQuery(query)
.experiencing(TASK_MANAGEMENT_REQUEST_TIMEOUT)
.at(intermediateDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
assertThatQuery(query)
.experiencing(TASK_GET_RESULTS_REQUEST_TIMEOUT)
// using boundary stage so we observe task failures
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Encountered too many errors talking to a worker node|Error closing remote buffer.*3 failures"))
.finishesSuccessfully();
}
@Test(invocationCount = INVOCATION_COUNT)
public void testUserFailure()
{
assertThatThrownBy(() -> getQueryRunner().execute("SELECT * FROM nation WHERE regionKey / nationKey - 1 = 0"))
.hasMessageContaining("Division by zero");
assertThatQuery("SELECT * FROM nation")
.experiencing(TASK_FAILURE, Optional.of(ErrorType.USER_ERROR))
.at(leafStage())
.failsAlways(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testCreateTable()
{
testTableModification(
Optional.empty(),
"CREATE TABLE <table> AS SELECT * FROM orders",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testInsert()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders WITH NO DATA"),
"INSERT INTO <table> SELECT * FROM orders",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testDelete()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"DELETE FROM <table> WHERE orderkey = 1",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testDeleteWithSubquery()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"DELETE FROM <table> WHERE custkey IN (SELECT custkey FROM customer WHERE nationkey = 1)",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testUpdate()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"UPDATE <table> SET shippriority = 101 WHERE custkey = 1",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testUpdateWithSubquery()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"UPDATE <table> SET shippriority = 101 WHERE custkey = (SELECT min(custkey) FROM customer)",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testAnalyzeStatistics()
{
testNonSelect(
Optional.empty(),
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"ANALYZE <table>",
Optional.of("DROP TABLE <table>"),
false);
}
@Test(invocationCount = INVOCATION_COUNT)
public void testRefreshMaterializedView()
{
testTableModification(
Optional.of("CREATE MATERIALIZED VIEW <table> AS SELECT * FROM orders"),
"REFRESH MATERIALIZED VIEW <table>",
Optional.of("DROP MATERIALIZED VIEW <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testExplainAnalyze()
{
testSelect("EXPLAIN ANALYZE SELECT orderStatus, count(*) FROM orders GROUP BY orderStatus");
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders WITH NO DATA"),
"EXPLAIN ANALYZE INSERT INTO <table> SELECT * FROM orders",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testRequestTimeouts()
{
// extra test cases not covered by general timeout cases scattered around
assertThatQuery("SELECT * FROM nation")
.experiencing(TASK_MANAGEMENT_REQUEST_TIMEOUT)
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
assertThatQuery("SELECT * FROM nation")
.experiencing(TASK_MANAGEMENT_REQUEST_TIMEOUT)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
if (areWriteRetriesSupported()) {
assertThatQuery("INSERT INTO <table> SELECT * FROM orders")
.withSetupQuery(Optional.of("CREATE TABLE <table> AS SELECT * FROM orders WITH NO DATA"))
.withCleanupQuery(Optional.of("DROP TABLE <table>"))
.experiencing(TASK_GET_RESULTS_REQUEST_TIMEOUT)
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
// get results timeout for leaf stage will not result in accounted task failure if failure recovery is enabled
.finishesSuccessfullyWithoutTaskFailures();
}
}
protected void testTableModification(Optional<String> setupQuery, String query, Optional<String> cleanupQuery)
{
testTableModification(Optional.empty(), setupQuery, query, cleanupQuery);
}
protected void testTableModification(Optional<Session> session, Optional<String> setupQuery, String query, Optional<String> cleanupQuery)
{
testNonSelect(session, setupQuery, query, cleanupQuery, true);
}
protected void testNonSelect(Optional<Session> session, Optional<String> setupQuery, String query, Optional<String> cleanupQuery, boolean writesData)
{
if (writesData && !areWriteRetriesSupported()) {
// if retries are not supported assert on that and skip actual failures simulation
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.failsDespiteRetries(failure -> failure.hasMessageMatching("This connector does not support query retries"));
return;
}
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(boundaryCoordinatorStage())
.failsAlways(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE));
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(rootStage())
.failsAlways(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE));
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully();
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully();
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(intermediateDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully();
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_MANAGEMENT_REQUEST_FAILURE)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Error 500 Internal Server Error|Error closing remote buffer, expected 204 got 500"))
.finishesSuccessfully();
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_GET_RESULTS_REQUEST_FAILURE)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Error 500 Internal Server Error|Error closing remote buffer, expected 204 got 500"))
.finishesSuccessfully();
assertThatQuery(query)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_MANAGEMENT_REQUEST_TIMEOUT)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
assertThatQuery(query)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_GET_RESULTS_REQUEST_TIMEOUT)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
}
private FailureRecoveryAssert assertThatQuery(String query)
{
return new FailureRecoveryAssert(query);
}
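// Fluent helper for failure-recovery checks: configure an injected failure (type, error type, target stage),
// then compare a failure-free baseline run against runs with and without retries.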
protected class FailureRecoveryAssert
{
private final String query;
private Session session = getQueryRunner().getDefaultSession();
private Optional<Function<MaterializedResult, Integer>> stageSelector;
private Optional<InjectedFailureType> failureType = Optional.empty();
private Optional<ErrorType> errorType = Optional.empty();
private Optional<String> setup = Optional.empty();
private Optional<String> cleanup = Optional.empty();
public FailureRecoveryAssert(String query)
{
this.query = requireNonNull(query, "query is null");
}
public FailureRecoveryAssert withSession(Optional<Session> session)
{
requireNonNull(session, "session is null");
session.ifPresent(value -> this.session = value);
return this;
}
public FailureRecoveryAssert withSetupQuery(Optional<String> query)
{
setup = requireNonNull(query, "query is null");
return this;
}
public FailureRecoveryAssert withCleanupQuery(Optional<String> query)
{
cleanup = requireNonNull(query, "query is null");
return this;
}
public FailureRecoveryAssert experiencing(InjectedFailureType failureType)
{
return experiencing(failureType, Optional.empty());
}
public FailureRecoveryAssert experiencing(InjectedFailureType failureType, Optional<ErrorType> errorType)
{
this.failureType = Optional.of(requireNonNull(failureType, "failureType is null"));
this.errorType = requireNonNull(errorType, "errorType is null");
if (failureType == TASK_FAILURE) {
checkArgument(errorType.isPresent(), "error type must be present when injection type is task failure");
}
else {
checkArgument(errorType.isEmpty(), "error type must not be present when injection type is not task failure");
}
return this;
}
public FailureRecoveryAssert at(Function<MaterializedResult, Integer> stageSelector)
{
this.stageSelector = Optional.of(requireNonNull(stageSelector, "stageSelector is null"));
return this;
}
private ExecutionResult executeExpected()
{
return execute(noRetries(session), query, Optional.empty());
}
private ExecutionResult executeActual(OptionalInt failureStageId)
{
return executeActual(session, failureStageId);
}
private ExecutionResult executeActualNoRetries(OptionalInt failureStageId)
{
return executeActual(noRetries(session), failureStageId);
}
private ExecutionResult executeActual(Session session, OptionalInt failureStageId)
{
String token = UUID.randomUUID().toString();
if (failureType.isPresent()) {
getQueryRunner().injectTaskFailure(
token,
failureStageId.orElseThrow(() -> new IllegalArgumentException("failure stageId not provided")),
0,
0,
failureType.get(),
errorType);
return execute(session, query, Optional.of(token));
}
// no failure injected
return execute(session, query, Optional.of(token));
}
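// Runs the optional setup query, executes the query under test tagged with a trace token (so injected failures
// match only this execution), captures updated table content/statistics for write and ANALYZE queries, and
// always attempts the cleanup query, re-throwing the first failure with any cleanup failure suppressed.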
private ExecutionResult execute(Session session, String query, Optional<String> traceToken)
{
String tableName = "table_" + randomTableSuffix();
setup.ifPresent(sql -> getQueryRunner().execute(noRetries(session), resolveTableName(sql, tableName)));
ResultWithQueryId<MaterializedResult> resultWithQueryId = null;
RuntimeException failure = null;
try {
resultWithQueryId = getDistributedQueryRunner().executeWithQueryId(withTraceToken(session, traceToken), resolveTableName(query, tableName));
}
catch (RuntimeException e) {
failure = e;
}
MaterializedResult result = resultWithQueryId == null ? null : resultWithQueryId.getResult();
Optional<MaterializedResult> updatedTableContent = Optional.empty();
if (result != null && result.getUpdateCount().isPresent()) {
updatedTableContent = Optional.of(getQueryRunner().execute(noRetries(session), "SELECT * FROM " + tableName));
}
Optional<MaterializedResult> updatedTableStatistics = Optional.empty();
if (result != null && result.getUpdateType().isPresent() && result.getUpdateType().get().equals("ANALYZE")) {
updatedTableStatistics = Optional.of(getQueryRunner().execute(noRetries(session), "SHOW STATS FOR " + tableName));
}
try {
cleanup.ifPresent(sql -> getQueryRunner().execute(noRetries(session), resolveTableName(sql, tableName)));
}
catch (RuntimeException e) {
if (failure == null) {
failure = e;
}
else if (failure != e) {
failure.addSuppressed(e);
}
}
if (failure != null) {
throw failure;
}
return new ExecutionResult(resultWithQueryId, updatedTableContent, updatedTableStatistics);
}
public void finishesSuccessfully()
{
finishesSuccessfully(queryId -> {});
}
public void finishesSuccessfullyWithoutTaskFailures()
{
finishesSuccessfully(queryId -> {}, false);
}
private void finishesSuccessfully(Consumer<QueryId> queryAssertion)
{
finishesSuccessfully(queryAssertion, true);
}
public void finishesSuccessfully(Consumer<QueryId> queryAssertion, boolean expectTaskFailures)
{
verifyFailureTypeAndStageSelector();
ExecutionResult expected = executeExpected();
MaterializedResult expectedQueryResult = expected.getQueryResult();
OptionalInt failureStageId = getFailureStageId(() -> expectedQueryResult);
ExecutionResult actual = executeActual(failureStageId);
assertEquals(getStageStats(actual.getQueryResult(), failureStageId.getAsInt()).getFailedTasks(), expectTaskFailures ? 1 : 0);
MaterializedResult actualQueryResult = actual.getQueryResult();
boolean isAnalyze = expectedQueryResult.getUpdateType().isPresent() && expectedQueryResult.getUpdateType().get().equals("ANALYZE");
boolean isUpdate = expectedQueryResult.getUpdateCount().isPresent();
boolean isExplain = query.trim().toUpperCase(ENGLISH).startsWith("EXPLAIN");
if (isAnalyze) {
assertEquals(actualQueryResult.getUpdateCount(), expectedQueryResult.getUpdateCount());
assertThat(expected.getUpdatedTableStatistics()).isPresent();
assertThat(actual.getUpdatedTableStatistics()).isPresent();
MaterializedResult expectedUpdatedTableStatistics = expected.getUpdatedTableStatistics().get();
MaterializedResult actualUpdatedTableStatistics = actual.getUpdatedTableStatistics().get();
assertEqualsIgnoreOrder(actualUpdatedTableStatistics, expectedUpdatedTableStatistics, "For query: \n " + query);
}
else if (isUpdate) {
assertEquals(actualQueryResult.getUpdateCount(), expectedQueryResult.getUpdateCount());
assertThat(expected.getUpdatedTableContent()).isPresent();
assertThat(actual.getUpdatedTableContent()).isPresent();
MaterializedResult expectedUpdatedTableContent = expected.getUpdatedTableContent().get();
MaterializedResult actualUpdatedTableContent = actual.getUpdatedTableContent().get();
assertEqualsIgnoreOrder(actualUpdatedTableContent, expectedUpdatedTableContent, "For query: \n " + query);
}
else if (isExplain) {
assertEquals(actualQueryResult.getRowCount(), expectedQueryResult.getRowCount());
}
else {
assertEqualsIgnoreOrder(actualQueryResult, expectedQueryResult, "For query: \n " + query);
}
queryAssertion.accept(actual.getQueryId());
}
public FailureRecoveryAssert failsAlways(Consumer<AbstractThrowableAssert<?, ? extends Throwable>> failureAssertion)
{
failsWithoutRetries(failureAssertion);
failsDespiteRetries(failureAssertion);
return this;
}
public FailureRecoveryAssert failsWithoutRetries(Consumer<AbstractThrowableAssert<?, ? extends Throwable>> failureAssertion)
{
verifyFailureTypeAndStageSelector();
OptionalInt failureStageId = getFailureStageId(() -> executeExpected().getQueryResult());
failureAssertion.accept(assertThatThrownBy(() -> executeActualNoRetries(failureStageId)));
return this;
}
public FailureRecoveryAssert failsDespiteRetries(Consumer<AbstractThrowableAssert<?, ? extends Throwable>> failureAssertion)
{
verifyFailureTypeAndStageSelector();
OptionalInt failureStageId = getFailureStageId(() -> executeExpected().getQueryResult());
failureAssertion.accept(assertThatThrownBy(() -> executeActual(failureStageId)));
return this;
}
private void verifyFailureTypeAndStageSelector()
{
assertThat(failureType.isPresent() == stageSelector.isPresent()).withFailMessage("Either both or none of failureType and stageSelector must be set").isTrue();
}
private OptionalInt getFailureStageId(Supplier<MaterializedResult> expectedQueryResult)
{
if (stageSelector.isEmpty()) {
return OptionalInt.empty();
}
// only compute MaterializedResult if needed
return OptionalInt.of(stageSelector.get().apply(expectedQueryResult.get()));
}
private String resolveTableName(String query, String tableName)
{
return query.replaceAll("<table>", tableName);
}
private Session noRetries(Session session)
{
return Session.builder(session)
.setSystemProperty("retry_policy", "NONE")
.build();
}
private Session withTraceToken(Session session, Optional<String> traceToken)
{
return Session.builder(session)
.setTraceToken(traceToken)
.build();
}
}
private static class ExecutionResult
{
private final MaterializedResult queryResult;
private final QueryId queryId;
private final Optional<MaterializedResult> updatedTableContent;
private final Optional<MaterializedResult> updatedTableStatistics;
private ExecutionResult(
ResultWithQueryId<MaterializedResult> resultWithQueryId,
Optional<MaterializedResult> updatedTableContent,
Optional<MaterializedResult> updatedTableStatistics)
{
requireNonNull(resultWithQueryId, "resultWithQueryId is null");
this.queryResult = resultWithQueryId.getResult();
this.queryId = resultWithQueryId.getQueryId();
this.updatedTableContent = requireNonNull(updatedTableContent, "updatedTableContent is null");
this.updatedTableStatistics = requireNonNull(updatedTableStatistics, "updatedTableStatistics is null");
}
public MaterializedResult getQueryResult()
{
return queryResult;
}
public QueryId getQueryId()
{
return queryId;
}
public Optional<MaterializedResult> getUpdatedTableContent()
{
return updatedTableContent;
}
public Optional<MaterializedResult> getUpdatedTableStatistics()
{
return updatedTableStatistics;
}
}
protected static Function<MaterializedResult, Integer> rootStage()
{
return result -> parseInt(getRootStage(result).getStageId());
}
protected static Function<MaterializedResult, Integer> boundaryCoordinatorStage()
{
return result -> findStageId(result, stage -> stage.isCoordinatorOnly() && stage.getSubStages().stream().noneMatch(StageStats::isCoordinatorOnly));
}
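// Picks the topmost distributed stage: the root stage if it is already distributed, otherwise a random direct
// child of the lowest coordinator-only stage.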
protected static Function<MaterializedResult, Integer> boundaryDistributedStage()
{
return result -> {
StageStats rootStage = getRootStage(result);
if (!rootStage.isCoordinatorOnly()) {
return parseInt(rootStage.getStageId());
}
StageStats boundaryCoordinatorStage = findStage(result, stage -> stage.isCoordinatorOnly() && stage.getSubStages().stream().noneMatch(StageStats::isCoordinatorOnly));
StageStats boundaryDistributedStage = boundaryCoordinatorStage.getSubStages().get(ThreadLocalRandom.current().nextInt(boundaryCoordinatorStage.getSubStages().size()));
return parseInt(boundaryDistributedStage.getStageId());
};
}
protected static Function<MaterializedResult, Integer> intermediateDistributedStage()
{
return result -> findStageId(result, stage -> !stage.isCoordinatorOnly() && !stage.getSubStages().isEmpty());
}
protected static Function<MaterializedResult, Integer> leafStage()
{
return result -> findStageId(result, stage -> stage.getSubStages().isEmpty());
}
private static int findStageId(MaterializedResult result, Predicate<StageStats> predicate)
{
return parseInt(findStage(result, predicate).getStageId());
}
private static StageStats findStage(MaterializedResult result, Predicate<StageStats> predicate)
{
List<StageStats> stages = stream(Traverser.forTree(StageStats::getSubStages).breadthFirst(getRootStage(result)))
.filter(predicate)
.collect(toImmutableList());
if (stages.isEmpty()) {
throw new IllegalArgumentException("stage not found");
}
return stages.get(ThreadLocalRandom.current().nextInt(stages.size()));
}
private static StageStats getStageStats(MaterializedResult result, int stageId)
{
return stream(Traverser.forTree(StageStats::getSubStages).breadthFirst(getRootStage(result)))
.filter(stageStats -> parseInt(stageStats.getStageId()) == stageId)
.findFirst()
.orElseThrow(() -> new IllegalArgumentException("stage stats not found: " + stageId));
}
private static StageStats getRootStage(MaterializedResult result)
{
StatementStats statementStats = result.getStatementStats().orElseThrow(() -> new IllegalArgumentException("statement stats is not present"));
return requireNonNull(statementStats.getRootStage(), "root stage is null");
}
private Session enableDynamicFiltering(boolean enabled)
{
Session defaultSession = getQueryRunner().getDefaultSession();
return Session.builder(defaultSession)
.setSystemProperty(ENABLE_DYNAMIC_FILTERING, Boolean.toString(enabled))
.setSystemProperty(JOIN_REORDERING_STRATEGY, NONE.name())
.setSystemProperty(JOIN_DISTRIBUTION_TYPE, PARTITIONED.name())
.setCatalogSessionProperty(defaultSession.getCatalog().orElseThrow(), "dynamic_filtering_wait_timeout", "1h")
.build();
}
}
| testing/trino-testing/src/main/java/io/trino/testing/BaseFailureRecoveryTest.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.testing;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.graph.Traverser;
import io.airlift.units.Duration;
import io.trino.Session;
import io.trino.client.StageStats;
import io.trino.client.StatementStats;
import io.trino.execution.FailureInjector.InjectedFailureType;
import io.trino.operator.OperatorStats;
import io.trino.operator.RetryPolicy;
import io.trino.server.DynamicFilterService.DynamicFilterDomainStats;
import io.trino.server.DynamicFilterService.DynamicFiltersStats;
import io.trino.spi.ErrorType;
import io.trino.spi.QueryId;
import io.trino.tpch.TpchTable;
import org.assertj.core.api.AbstractThrowableAssert;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Streams.stream;
import static io.trino.SystemSessionProperties.ENABLE_DYNAMIC_FILTERING;
import static io.trino.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE;
import static io.trino.SystemSessionProperties.JOIN_REORDERING_STRATEGY;
import static io.trino.execution.FailureInjector.FAILURE_INJECTION_MESSAGE;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_FAILURE;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_GET_RESULTS_REQUEST_FAILURE;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_GET_RESULTS_REQUEST_TIMEOUT;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_MANAGEMENT_REQUEST_FAILURE;
import static io.trino.execution.FailureInjector.InjectedFailureType.TASK_MANAGEMENT_REQUEST_TIMEOUT;
import static io.trino.spi.predicate.Domain.singleValue;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.sql.planner.OptimizerConfig.JoinDistributionType.PARTITIONED;
import static io.trino.sql.planner.OptimizerConfig.JoinReorderingStrategy.NONE;
import static io.trino.testing.QueryAssertions.assertEqualsIgnoreOrder;
import static io.trino.testing.sql.TestTable.randomTableSuffix;
import static io.trino.tpch.TpchTable.CUSTOMER;
import static io.trino.tpch.TpchTable.NATION;
import static io.trino.tpch.TpchTable.ORDERS;
import static io.trino.tpch.TpchTable.SUPPLIER;
import static java.lang.Integer.parseInt;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.testng.Assert.assertEquals;
public abstract class BaseFailureRecoveryTest
extends AbstractTestQueryFramework
{
private static final String PARTITIONED_LINEITEM = "partitioned_lineitem";
protected static final int INVOCATION_COUNT = 1;
private static final Duration MAX_ERROR_DURATION = new Duration(5, SECONDS);
private static final Duration REQUEST_TIMEOUT = new Duration(5, SECONDS);
private final RetryPolicy retryPolicy;
protected BaseFailureRecoveryTest(RetryPolicy retryPolicy)
{
this.retryPolicy = requireNonNull(retryPolicy, "retryPolicy is null");
}
@Override
protected final QueryRunner createQueryRunner()
throws Exception
{
return createQueryRunner(
ImmutableList.of(NATION, ORDERS, CUSTOMER, SUPPLIER),
ImmutableMap.<String, String>builder()
.put("query.remote-task.max-error-duration", MAX_ERROR_DURATION.toString())
.put("exchange.max-error-duration", MAX_ERROR_DURATION.toString())
.put("retry-policy", retryPolicy.toString())
.put("retry-initial-delay", "0s")
.put("query-retry-attempts", "1")
.put("task-retry-attempts-overall", "1")
.put("failure-injection.request-timeout", new Duration(REQUEST_TIMEOUT.toMillis() * 2, MILLISECONDS).toString())
// making http timeouts shorter so tests which simulate communication timeouts finish in reasonable amount of time
.put("exchange.http-client.idle-timeout", REQUEST_TIMEOUT.toString())
.put("query.hash-partition-count", "5")
// to trigger spilling
.put("exchange.deduplication-buffer-size", "1kB")
.put("fault-tolerant-execution-task-memory", "1GB")
.buildOrThrow(),
ImmutableMap.<String, String>builder()
// making http timeouts shorter so tests which simulate communication timeouts finish in reasonable amount of time
.put("scheduler.http-client.idle-timeout", REQUEST_TIMEOUT.toString())
.buildOrThrow());
}
protected abstract QueryRunner createQueryRunner(
List<TpchTable<?>> requiredTpchTables,
Map<String, String> configProperties,
Map<String, String> coordinatorProperties)
throws Exception;
@BeforeClass
public void initTables()
throws Exception
{
// setup partitioned fact table for dynamic partition pruning
createPartitionedLineitemTable(PARTITIONED_LINEITEM, ImmutableList.of("orderkey", "partkey", "suppkey"), "suppkey");
}
protected abstract void createPartitionedLineitemTable(String tableName, List<String> columns, String partitionColumn);
protected abstract boolean areWriteRetriesSupported();
@Test(invocationCount = INVOCATION_COUNT)
public void testSimpleSelect()
{
testSelect("SELECT * FROM nation");
}
@Test(invocationCount = INVOCATION_COUNT)
public void testAggregation()
{
testSelect("SELECT orderStatus, count(*) FROM orders GROUP BY orderStatus");
}
@Test(invocationCount = INVOCATION_COUNT)
public void testJoinDynamicFilteringDisabled()
{
@Language("SQL") String selectQuery = "SELECT * FROM partitioned_lineitem JOIN supplier ON partitioned_lineitem.suppkey = supplier.suppkey " +
"AND supplier.name = 'Supplier#000000001'";
testSelect(selectQuery, Optional.of(enableDynamicFiltering(false)));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testJoinDynamicFilteringEnabled()
{
@Language("SQL") String selectQuery = "SELECT * FROM partitioned_lineitem JOIN supplier ON partitioned_lineitem.suppkey = supplier.suppkey " +
"AND supplier.name = 'Supplier#000000001'";
testSelect(
selectQuery,
Optional.of(enableDynamicFiltering(true)),
queryId -> {
DynamicFiltersStats dynamicFiltersStats = getDynamicFilteringStats(queryId);
assertThat(dynamicFiltersStats.getLazyDynamicFilters()).isEqualTo(1);
DynamicFilterDomainStats domainStats = getOnlyElement(dynamicFiltersStats.getDynamicFilterDomainStats());
assertThat(domainStats.getSimplifiedDomain())
.isEqualTo(singleValue(BIGINT, 1L).toString(getSession().toConnectorSession()));
OperatorStats probeStats = searchScanFilterAndProjectOperatorStats(queryId, getQualifiedTableName(PARTITIONED_LINEITEM));
// Currently, stats from all attempts are combined.
// Asserting on multiple of 615L as well in case the probe scan was completed twice
assertThat(probeStats.getInputPositions()).isIn(615L, 1230L);
});
}
protected void testSelect(String query)
{
testSelect(query, Optional.empty());
}
protected void testSelect(String query, Optional<Session> session)
{
testSelect(query, session, queryId -> {});
}
protected void testSelect(String query, Optional<Session> session, Consumer<QueryId> queryAssertion)
{
assertThatQuery(query)
.withSession(session)
.experiencing(TASK_MANAGEMENT_REQUEST_FAILURE)
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Error 500 Internal Server Error|Error closing remote buffer, expected 204 got 500"))
.finishesSuccessfully(queryAssertion);
assertThatQuery(query)
.withSession(session)
.experiencing(TASK_GET_RESULTS_REQUEST_FAILURE)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Error 500 Internal Server Error|Error closing remote buffer, expected 204 got 500"))
.finishesSuccessfully(queryAssertion);
assertThatQuery(query)
.withSession(session)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully(queryAssertion);
assertThatQuery(query)
.withSession(session)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.EXTERNAL))
.at(intermediateDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully(queryAssertion);
assertThatQuery(query)
.experiencing(TASK_MANAGEMENT_REQUEST_TIMEOUT)
.at(intermediateDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
assertThatQuery(query)
.experiencing(TASK_GET_RESULTS_REQUEST_TIMEOUT)
// using boundary stage so we observe task failures
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Encountered too many errors talking to a worker node|Error closing remote buffer.*3 failures"))
.finishesSuccessfully();
}
@Test(invocationCount = INVOCATION_COUNT)
public void testUserFailure()
{
assertThatThrownBy(() -> getQueryRunner().execute("SELECT * FROM nation WHERE regionKey / nationKey - 1 = 0"))
.hasMessageContaining("Division by zero");
assertThatQuery("SELECT * FROM nation")
.experiencing(TASK_FAILURE, Optional.of(ErrorType.USER_ERROR))
.at(leafStage())
.failsAlways(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testCreateTable()
{
testTableModification(
Optional.empty(),
"CREATE TABLE <table> AS SELECT * FROM orders",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testInsert()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders WITH NO DATA"),
"INSERT INTO <table> SELECT * FROM orders",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testDelete()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"DELETE FROM orders WHERE orderkey = 1",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testDeleteWithSubquery()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"DELETE FROM orders WHERE custkey IN (SELECT custkey FROM customer WHERE nationkey = 1)",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testUpdate()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"UPDATE orders SET shippriority = 101 WHERE custkey = 1",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testUpdateWithSubquery()
{
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"UPDATE orders SET shippriority = 101 WHERE custkey = (SELECT min(custkey) FROM customer)",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testAnalyzeStatistics()
{
testNonSelect(
Optional.empty(),
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders"),
"ANALYZE <table>",
Optional.of("DROP TABLE <table>"),
false);
}
@Test(invocationCount = INVOCATION_COUNT)
public void testRefreshMaterializedView()
{
testTableModification(
Optional.of("CREATE MATERIALIZED VIEW <table> AS SELECT * FROM orders"),
"REFRESH MATERIALIZED VIEW <table>",
Optional.of("DROP MATERIALIZED VIEW <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testExplainAnalyze()
{
testSelect("EXPLAIN ANALYZE SELECT orderStatus, count(*) FROM orders GROUP BY orderStatus");
testTableModification(
Optional.of("CREATE TABLE <table> AS SELECT * FROM orders WITH NO DATA"),
"EXPLAIN ANALYZE INSERT INTO <table> SELECT * FROM orders",
Optional.of("DROP TABLE <table>"));
}
@Test(invocationCount = INVOCATION_COUNT)
public void testRequestTimeouts()
{
// extra test cases not covered by general timeout cases scattered around
assertThatQuery("SELECT * FROM nation")
.experiencing(TASK_MANAGEMENT_REQUEST_TIMEOUT)
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
assertThatQuery("SELECT * FROM nation")
.experiencing(TASK_MANAGEMENT_REQUEST_TIMEOUT)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
if (areWriteRetriesSupported()) {
assertThatQuery("INSERT INTO <table> SELECT * FROM orders")
.withSetupQuery(Optional.of("CREATE TABLE <table> AS SELECT * FROM orders WITH NO DATA"))
.withCleanupQuery(Optional.of("DROP TABLE <table>"))
.experiencing(TASK_GET_RESULTS_REQUEST_TIMEOUT)
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
// get results timeout for leaf stage will not result in accounted task failure if failure recovery is enabled
.finishesSuccessfullyWithoutTaskFailures();
}
}
protected void testTableModification(Optional<String> setupQuery, String query, Optional<String> cleanupQuery)
{
testTableModification(Optional.empty(), setupQuery, query, cleanupQuery);
}
protected void testTableModification(Optional<Session> session, Optional<String> setupQuery, String query, Optional<String> cleanupQuery)
{
testNonSelect(session, setupQuery, query, cleanupQuery, true);
}
protected void testNonSelect(Optional<Session> session, Optional<String> setupQuery, String query, Optional<String> cleanupQuery, boolean writesData)
{
if (writesData && !areWriteRetriesSupported()) {
// if retries are not supported assert on that and skip actual failures simulation
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.failsDespiteRetries(failure -> failure.hasMessageMatching("This connector does not support query retries"));
return;
}
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(boundaryCoordinatorStage())
.failsAlways(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE));
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(rootStage())
.failsAlways(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE));
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(leafStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully();
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully();
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
.at(intermediateDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
.finishesSuccessfully();
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_MANAGEMENT_REQUEST_FAILURE)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Error 500 Internal Server Error|Error closing remote buffer, expected 204 got 500"))
.finishesSuccessfully();
assertThatQuery(query)
.withSession(session)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_GET_RESULTS_REQUEST_FAILURE)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageFindingMatch("Error 500 Internal Server Error|Error closing remote buffer, expected 204 got 500"))
.finishesSuccessfully();
assertThatQuery(query)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_MANAGEMENT_REQUEST_TIMEOUT)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
assertThatQuery(query)
.withSetupQuery(setupQuery)
.withCleanupQuery(cleanupQuery)
.experiencing(TASK_GET_RESULTS_REQUEST_TIMEOUT)
.at(boundaryDistributedStage())
.failsWithoutRetries(failure -> failure.hasMessageContaining("Encountered too many errors talking to a worker node"))
.finishesSuccessfully();
}
private FailureRecoveryAssert assertThatQuery(String query)
{
return new FailureRecoveryAssert(query);
}
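    /**
     * Fluent assertion builder for failure-recovery scenarios. An illustrative usage, mirroring
     * the calls made in the tests above:
     *
     * <pre>
     * assertThatQuery("INSERT INTO <table> SELECT * FROM orders")
     *         .withSetupQuery(Optional.of("CREATE TABLE <table> AS SELECT * FROM orders WITH NO DATA"))
     *         .withCleanupQuery(Optional.of("DROP TABLE <table>"))
     *         .experiencing(TASK_FAILURE, Optional.of(ErrorType.INTERNAL_ERROR))
     *         .at(leafStage())
     *         .failsWithoutRetries(failure -> failure.hasMessageContaining(FAILURE_INJECTION_MESSAGE))
     *         .finishesSuccessfully();
     * </pre>
     *
     * The {@code <table>} placeholder is replaced with a freshly generated table name for every execution.
     */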
protected class FailureRecoveryAssert
{
private final String query;
private Session session = getQueryRunner().getDefaultSession();
        private Optional<Function<MaterializedResult, Integer>> stageSelector = Optional.empty();
private Optional<InjectedFailureType> failureType = Optional.empty();
private Optional<ErrorType> errorType = Optional.empty();
private Optional<String> setup = Optional.empty();
private Optional<String> cleanup = Optional.empty();
public FailureRecoveryAssert(String query)
{
this.query = requireNonNull(query, "query is null");
}
public FailureRecoveryAssert withSession(Optional<Session> session)
{
requireNonNull(session, "session is null");
session.ifPresent(value -> this.session = value);
return this;
}
public FailureRecoveryAssert withSetupQuery(Optional<String> query)
{
setup = requireNonNull(query, "query is null");
return this;
}
public FailureRecoveryAssert withCleanupQuery(Optional<String> query)
{
cleanup = requireNonNull(query, "query is null");
return this;
}
public FailureRecoveryAssert experiencing(InjectedFailureType failureType)
{
return experiencing(failureType, Optional.empty());
}
public FailureRecoveryAssert experiencing(InjectedFailureType failureType, Optional<ErrorType> errorType)
{
this.failureType = Optional.of(requireNonNull(failureType, "failureType is null"));
this.errorType = requireNonNull(errorType, "errorType is null");
if (failureType == TASK_FAILURE) {
checkArgument(errorType.isPresent(), "error type must be present when injection type is task failure");
}
else {
checkArgument(errorType.isEmpty(), "error type must not be present when injection type is not task failure");
}
return this;
}
public FailureRecoveryAssert at(Function<MaterializedResult, Integer> stageSelector)
{
this.stageSelector = Optional.of(requireNonNull(stageSelector, "stageSelector is null"));
return this;
}
private ExecutionResult executeExpected()
{
return execute(noRetries(session), query, Optional.empty());
}
private ExecutionResult executeActual(OptionalInt failureStageId)
{
return executeActual(session, failureStageId);
}
private ExecutionResult executeActualNoRetries(OptionalInt failureStageId)
{
return executeActual(noRetries(session), failureStageId);
}
private ExecutionResult executeActual(Session session, OptionalInt failureStageId)
{
String token = UUID.randomUUID().toString();
if (failureType.isPresent()) {
getQueryRunner().injectTaskFailure(
token,
failureStageId.orElseThrow(() -> new IllegalArgumentException("failure stageId not provided")),
0,
0,
failureType.get(),
errorType);
return execute(session, query, Optional.of(token));
}
// no failure injected
return execute(session, query, Optional.of(token));
}
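        /*
         * Runs the full lifecycle for one execution: resolve the <table> placeholder to a unique
         * name, run the optional setup query without retries, execute the statement under test
         * with the given trace token, capture the written table contents (for DML) or table
         * statistics (for ANALYZE), and finally run the cleanup query, keeping the first failure
         * and attaching any later one as suppressed.
         */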
private ExecutionResult execute(Session session, String query, Optional<String> traceToken)
{
String tableName = "table_" + randomTableSuffix();
setup.ifPresent(sql -> getQueryRunner().execute(noRetries(session), resolveTableName(sql, tableName)));
ResultWithQueryId<MaterializedResult> resultWithQueryId = null;
RuntimeException failure = null;
try {
resultWithQueryId = getDistributedQueryRunner().executeWithQueryId(withTraceToken(session, traceToken), resolveTableName(query, tableName));
}
catch (RuntimeException e) {
failure = e;
}
MaterializedResult result = resultWithQueryId == null ? null : resultWithQueryId.getResult();
Optional<MaterializedResult> updatedTableContent = Optional.empty();
if (result != null && result.getUpdateCount().isPresent()) {
updatedTableContent = Optional.of(getQueryRunner().execute(noRetries(session), "SELECT * FROM " + tableName));
}
Optional<MaterializedResult> updatedTableStatistics = Optional.empty();
if (result != null && result.getUpdateType().isPresent() && result.getUpdateType().get().equals("ANALYZE")) {
updatedTableStatistics = Optional.of(getQueryRunner().execute(noRetries(session), "SHOW STATS FOR " + tableName));
}
try {
cleanup.ifPresent(sql -> getQueryRunner().execute(noRetries(session), resolveTableName(sql, tableName)));
}
catch (RuntimeException e) {
if (failure == null) {
failure = e;
}
else if (failure != e) {
failure.addSuppressed(e);
}
}
if (failure != null) {
throw failure;
}
return new ExecutionResult(resultWithQueryId, updatedTableContent, updatedTableStatistics);
}
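        /*
         * Success is verified by executing the query twice: once with retries disabled and no
         * injected failure to obtain the expected result, and once with the failure injected,
         * then comparing row counts, update counts, written table contents, or table statistics
         * depending on the kind of statement.
         */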
public void finishesSuccessfully()
{
finishesSuccessfully(queryId -> {});
}
public void finishesSuccessfullyWithoutTaskFailures()
{
finishesSuccessfully(queryId -> {}, false);
}
private void finishesSuccessfully(Consumer<QueryId> queryAssertion)
{
finishesSuccessfully(queryAssertion, true);
}
public void finishesSuccessfully(Consumer<QueryId> queryAssertion, boolean expectTaskFailures)
{
verifyFailureTypeAndStageSelector();
ExecutionResult expected = executeExpected();
MaterializedResult expectedQueryResult = expected.getQueryResult();
OptionalInt failureStageId = getFailureStageId(() -> expectedQueryResult);
ExecutionResult actual = executeActual(failureStageId);
assertEquals(getStageStats(actual.getQueryResult(), failureStageId.getAsInt()).getFailedTasks(), expectTaskFailures ? 1 : 0);
MaterializedResult actualQueryResult = actual.getQueryResult();
boolean isAnalyze = expectedQueryResult.getUpdateType().isPresent() && expectedQueryResult.getUpdateType().get().equals("ANALYZE");
boolean isUpdate = expectedQueryResult.getUpdateCount().isPresent();
boolean isExplain = query.trim().toUpperCase(ENGLISH).startsWith("EXPLAIN");
if (isAnalyze) {
assertEquals(actualQueryResult.getUpdateCount(), expectedQueryResult.getUpdateCount());
assertThat(expected.getUpdatedTableStatistics()).isPresent();
assertThat(actual.getUpdatedTableStatistics()).isPresent();
MaterializedResult expectedUpdatedTableStatistics = expected.getUpdatedTableStatistics().get();
MaterializedResult actualUpdatedTableStatistics = actual.getUpdatedTableStatistics().get();
assertEqualsIgnoreOrder(actualUpdatedTableStatistics, expectedUpdatedTableStatistics, "For query: \n " + query);
}
else if (isUpdate) {
assertEquals(actualQueryResult.getUpdateCount(), expectedQueryResult.getUpdateCount());
assertThat(expected.getUpdatedTableContent()).isPresent();
assertThat(actual.getUpdatedTableContent()).isPresent();
MaterializedResult expectedUpdatedTableContent = expected.getUpdatedTableContent().get();
MaterializedResult actualUpdatedTableContent = actual.getUpdatedTableContent().get();
assertEqualsIgnoreOrder(actualUpdatedTableContent, expectedUpdatedTableContent, "For query: \n " + query);
}
else if (isExplain) {
assertEquals(actualQueryResult.getRowCount(), expectedQueryResult.getRowCount());
}
else {
assertEqualsIgnoreOrder(actualQueryResult, expectedQueryResult, "For query: \n " + query);
}
queryAssertion.accept(actual.getQueryId());
}
public FailureRecoveryAssert failsAlways(Consumer<AbstractThrowableAssert<?, ? extends Throwable>> failureAssertion)
{
failsWithoutRetries(failureAssertion);
failsDespiteRetries(failureAssertion);
return this;
}
public FailureRecoveryAssert failsWithoutRetries(Consumer<AbstractThrowableAssert<?, ? extends Throwable>> failureAssertion)
{
verifyFailureTypeAndStageSelector();
OptionalInt failureStageId = getFailureStageId(() -> executeExpected().getQueryResult());
failureAssertion.accept(assertThatThrownBy(() -> executeActualNoRetries(failureStageId)));
return this;
}
public FailureRecoveryAssert failsDespiteRetries(Consumer<AbstractThrowableAssert<?, ? extends Throwable>> failureAssertion)
{
verifyFailureTypeAndStageSelector();
OptionalInt failureStageId = getFailureStageId(() -> executeExpected().getQueryResult());
failureAssertion.accept(assertThatThrownBy(() -> executeActual(failureStageId)));
return this;
}
private void verifyFailureTypeAndStageSelector()
{
assertThat(failureType.isPresent() == stageSelector.isPresent()).withFailMessage("Either both or none of failureType and stageSelector must be set").isTrue();
}
private OptionalInt getFailureStageId(Supplier<MaterializedResult> expectedQueryResult)
{
if (stageSelector.isEmpty()) {
return OptionalInt.empty();
}
            // the Supplier defers computing the expected result until a stage selector actually needs it
return OptionalInt.of(stageSelector.get().apply(expectedQueryResult.get()));
}
private String resolveTableName(String query, String tableName)
{
return query.replaceAll("<table>", tableName);
}
private Session noRetries(Session session)
{
return Session.builder(session)
.setSystemProperty("retry_policy", "NONE")
.build();
}
private Session withTraceToken(Session session, Optional<String> traceToken)
{
return Session.builder(session)
.setTraceToken(traceToken)
.build();
}
}
private static class ExecutionResult
{
private final MaterializedResult queryResult;
private final QueryId queryId;
private final Optional<MaterializedResult> updatedTableContent;
private final Optional<MaterializedResult> updatedTableStatistics;
private ExecutionResult(
ResultWithQueryId<MaterializedResult> resultWithQueryId,
Optional<MaterializedResult> updatedTableContent,
Optional<MaterializedResult> updatedTableStatistics)
{
requireNonNull(resultWithQueryId, "resultWithQueryId is null");
this.queryResult = resultWithQueryId.getResult();
this.queryId = resultWithQueryId.getQueryId();
this.updatedTableContent = requireNonNull(updatedTableContent, "updatedTableContent is null");
this.updatedTableStatistics = requireNonNull(updatedTableStatistics, "updatedTableStatistics is null");
}
public MaterializedResult getQueryResult()
{
return queryResult;
}
public QueryId getQueryId()
{
return queryId;
}
public Optional<MaterializedResult> getUpdatedTableContent()
{
return updatedTableContent;
}
public Optional<MaterializedResult> getUpdatedTableStatistics()
{
return updatedTableStatistics;
}
}
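    /*
     * Stage selectors map the expected query's stage tree (taken from the statement statistics)
     * to the id of the stage where the failure should be injected: the root stage, the
     * coordinator/distributed boundary, an intermediate distributed stage, or a leaf stage.
     * When several stages match, one is chosen at random.
     */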
protected static Function<MaterializedResult, Integer> rootStage()
{
return result -> parseInt(getRootStage(result).getStageId());
}
protected static Function<MaterializedResult, Integer> boundaryCoordinatorStage()
{
return result -> findStageId(result, stage -> stage.isCoordinatorOnly() && stage.getSubStages().stream().noneMatch(StageStats::isCoordinatorOnly));
}
protected static Function<MaterializedResult, Integer> boundaryDistributedStage()
{
return result -> {
StageStats rootStage = getRootStage(result);
if (!rootStage.isCoordinatorOnly()) {
return parseInt(rootStage.getStageId());
}
StageStats boundaryCoordinatorStage = findStage(result, stage -> stage.isCoordinatorOnly() && stage.getSubStages().stream().noneMatch(StageStats::isCoordinatorOnly));
StageStats boundaryDistributedStage = boundaryCoordinatorStage.getSubStages().get(ThreadLocalRandom.current().nextInt(boundaryCoordinatorStage.getSubStages().size()));
return parseInt(boundaryDistributedStage.getStageId());
};
}
protected static Function<MaterializedResult, Integer> intermediateDistributedStage()
{
return result -> findStageId(result, stage -> !stage.isCoordinatorOnly() && !stage.getSubStages().isEmpty());
}
protected static Function<MaterializedResult, Integer> leafStage()
{
return result -> findStageId(result, stage -> stage.getSubStages().isEmpty());
}
private static int findStageId(MaterializedResult result, Predicate<StageStats> predicate)
{
return parseInt(findStage(result, predicate).getStageId());
}
private static StageStats findStage(MaterializedResult result, Predicate<StageStats> predicate)
{
List<StageStats> stages = stream(Traverser.forTree(StageStats::getSubStages).breadthFirst(getRootStage(result)))
.filter(predicate)
.collect(toImmutableList());
if (stages.isEmpty()) {
throw new IllegalArgumentException("stage not found");
}
return stages.get(ThreadLocalRandom.current().nextInt(stages.size()));
}
private static StageStats getStageStats(MaterializedResult result, int stageId)
{
return stream(Traverser.forTree(StageStats::getSubStages).breadthFirst(getRootStage(result)))
.filter(stageStats -> parseInt(stageStats.getStageId()) == stageId)
.findFirst()
.orElseThrow(() -> new IllegalArgumentException("stage stats not found: " + stageId));
}
private static StageStats getRootStage(MaterializedResult result)
{
StatementStats statementStats = result.getStatementStats().orElseThrow(() -> new IllegalArgumentException("statement stats is not present"));
return requireNonNull(statementStats.getRootStage(), "root stage is null");
}
private Session enableDynamicFiltering(boolean enabled)
{
Session defaultSession = getQueryRunner().getDefaultSession();
return Session.builder(defaultSession)
.setSystemProperty(ENABLE_DYNAMIC_FILTERING, Boolean.toString(enabled))
.setSystemProperty(JOIN_REORDERING_STRATEGY, NONE.name())
.setSystemProperty(JOIN_DISTRIBUTION_TYPE, PARTITIONED.name())
.setCatalogSessionProperty(defaultSession.getCatalog().orElseThrow(), "dynamic_filtering_wait_timeout", "1h")
.build();
}
}
| Use proper table in DML fault tolerance tests
| testing/trino-testing/src/main/java/io/trino/testing/BaseFailureRecoveryTest.java | Use proper table in DML fault tolerance tests |
|
Java | apache-2.0 | 7b98c1ad27447e2b608c3b494627239d8745eb15 | 0 | codehaus/mvel,codehaus/mvel | package org.mvel.tests.core;
import org.mvel.*;
import static org.mvel.MVEL.*;
import static org.mvel.MVEL.compileExpression;
import static org.mvel.MVEL.executeExpression;
import org.mvel.ast.ASTNode;
import org.mvel.ast.Function;
import org.mvel.ast.WithNode;
import org.mvel.compiler.CompiledExpression;
import org.mvel.compiler.ExecutableStatement;
import org.mvel.compiler.ExpressionCompiler;
import org.mvel.debug.DebugTools;
import org.mvel.debug.Debugger;
import org.mvel.debug.Frame;
import org.mvel.integration.Interceptor;
import org.mvel.integration.PropertyHandlerFactory;
import org.mvel.integration.ResolverTools;
import org.mvel.integration.VariableResolverFactory;
import org.mvel.integration.impl.ClassImportResolverFactory;
import org.mvel.integration.impl.DefaultLocalVariableResolverFactory;
import org.mvel.integration.impl.MapVariableResolverFactory;
import org.mvel.integration.impl.StaticMethodImportResolverFactory;
import org.mvel.optimizers.OptimizerFactory;
import org.mvel.tests.core.res.*;
import org.mvel.util.CompilerTools;
import org.mvel.util.MethodStub;
import static org.mvel.util.ParseTools.loadFromFile;
import java.awt.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.text.SimpleDateFormat;
import java.util.*;
import static java.util.Collections.unmodifiableCollection;
import java.util.List;
@SuppressWarnings({"ALL"})
public class CoreConfidenceTests extends AbstractTest {
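    // The test(...) helper inherited from AbstractTest evaluates the given MVEL expression
    // against the shared variable map from createTestMap() (foo, pi, c, funMap, ...); based on
    // how it is used below it is assumed to exercise both interpreted and compiled evaluation,
    // while testCompiledSimple(...) compiles the expression against an explicitly supplied map.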
public void testSingleProperty() {
assertEquals(false, test("fun"));
}
public void testMethodOnValue() {
assertEquals("DOG", test("foo.bar.name.toUpperCase()"));
}
public void testMethodOnValue2() {
assertEquals("DOG", test("foo. bar. name.toUpperCase()"));
}
public void testSimpleProperty() {
assertEquals("dog", test("foo.bar.name"));
}
public void testSimpleProperty2() {
assertEquals("cat", test("DATA"));
}
public void testPropertyViaDerivedClass() {
assertEquals("cat", test("derived.data"));
}
public void testDeepAssignment() {
Map map = createTestMap();
assertEquals("crap", testCompiledSimple("foo.bar.assignTest = 'crap'", map));
assertEquals("crap", testCompiledSimple("foo.bar.assignTest", map));
}
public void testDeepAssignment2() {
Map map = createTestMap();
ExpressionCompiler compiler = new ExpressionCompiler("foo.bar.age = 21");
ParserContext ctx = new ParserContext();
ctx.addInput("foo", Foo.class);
ctx.setStrongTyping(true);
CompiledExpression ce = compiler.compile(ctx);
executeExpression(ce, map);
assertEquals(((Foo) map.get("foo")).getBar().getAge(), 21);
}
public void testThroughInterface() {
assertEquals("FOOBAR!", test("testImpl.name"));
}
public void testThroughInterface2() {
assertEquals(true, test("testImpl.foo"));
}
public void testMapAccessWithMethodCall() {
assertEquals("happyBar", test("funMap['foo'].happy()"));
}
public void testSimpleIfStatement() {
test("if (true) { System.out.println(\"test!\") } \n");
}
public void testBooleanOperator() {
assertEquals(true, test("foo.bar.woof == true"));
}
public void testBooleanOperator2() {
assertEquals(false, test("foo.bar.woof == false"));
}
public void testBooleanOperator3() {
assertEquals(true, test("foo.bar.woof== true"));
}
public void testBooleanOperator4() {
assertEquals(false, test("foo.bar.woof ==false"));
}
public void testBooleanOperator5() {
assertEquals(true, test("foo.bar.woof == true"));
}
public void testBooleanOperator6() {
assertEquals(false, test("foo.bar.woof==false"));
}
public void testTextComparison() {
assertEquals(true, test("foo.bar.name == 'dog'"));
}
public void testNETextComparison() {
assertEquals(true, test("foo.bar.name != 'foo'"));
}
public void testChor() {
assertEquals("cat", test("a or b or c"));
}
public void testChorWithLiteral() {
assertEquals("fubar", test("a or 'fubar'"));
}
public void testNullCompare() {
assertEquals(true, test("c != null"));
}
public void testUninitializedInt() {
assertEquals(0, test("sarahl"));
}
public void testAnd() {
assertEquals(true, test("c != null && foo.bar.name == 'dog' && foo.bar.woof"));
}
public void testAnd2() {
assertEquals(true, test("c!=null&&foo.bar.name=='dog'&&foo.bar.woof"));
}
public void testMath() {
assertEquals(188.4d, test("pi * hour"));
}
public void testMath2() {
assertEquals(3, test("foo.number-1"));
}
public void testMath3() {
assertEquals((10d * 5d) * 2d / 3d, test("(10 * 5) * 2 / 3"));
}
public void testMath4() {
int val = (int) ((100d % 3d) * 2d - 1d / 1d + 8d + (5d * 2d));
assertEquals(val, test("(100 % 3) * 2 - 1 / 1 + 8 + (5 * 2)"));
}
public void testMath4a() {
String expression = "(100 % 90) * 20 - 15 / 16 + 80 + (50 * 21)";
System.out.println("Expression: " + expression);
assertEquals(((100d % 90d) * 20d - 15d / 16d + 80d + (50d * 21d)), MVEL.eval(expression));
}
public void testMath5() {
assertEquals(300.5 / 5.3 / 2.1 / 1.5, test("300.5 / 5.3 / 2.1 / 1.5"));
}
public void testMath5a() {
String expression = "300.5 / 5.3 / 2.1 / 1.5";
System.out.println("Expression: " + expression);
assertEquals(300.5 / 5.3 / 2.1 / 1.5, MVEL.eval(expression));
}
public void testMath6() {
int val = (300 * 5 + 1) + 100 / 2 * 2;
assertEquals(val, test("(300 * five + 1) + (100 / 2 * 2)"));
}
public void testMath7() {
int val = (int) ((100d % 3d) * 2d - 1d / 1d + 8d + (5d * 2d));
assertEquals(val, test("(100 % 3) * 2 - 1 / 1 + 8 + (5 * 2)"));
}
public void testMath8() {
double val = 5d * (100.56d * 30.1d);
assertEquals(val, test("5 * (100.56 * 30.1)"));
}
public void testPowerOf() {
assertEquals(25, test("5 ** 2"));
}
public void testWhileUsingImports() {
Map<String, Object> imports = new HashMap<String, Object>();
imports.put("ArrayList", java.util.ArrayList.class);
imports.put("List", java.util.List.class);
ParserContext context = new ParserContext(imports, null, "testfile");
ExpressionCompiler compiler = new ExpressionCompiler("List list = new ArrayList(); return (list == empty)");
assertTrue((Boolean) executeExpression(compiler.compile(context), new DefaultLocalVariableResolverFactory()));
}
public void testComplexExpression() {
assertEquals("bar", test("a = 'foo'; b = 'bar'; c = 'jim'; list = {a,b,c}; list[1]"));
}
public void testComplexAnd() {
assertEquals(true, test("(pi * hour) > 0 && foo.happy() == 'happyBar'"));
}
public void testOperatorPrecedence() {
String ex = "_x_001 = 500.2; _x_002 = 200.8; _r_001 = 701; _r_001 == _x_001 + _x_002 || _x_001 == 500 + 0.1";
assertEquals(true, test(ex));
}
public void testOperatorPrecedence2() {
String ex = "_x_001 = 500.2; _x_002 = 200.8; _r_001 = 701; _r_001 == _x_001 + _x_002 && _x_001 == 500 + 0.2";
assertEquals(true, test(ex));
}
public void testOperatorPrecedence3() {
String ex = "_x_001 = 500.2; _x_002 = 200.9; _r_001 = 701; _r_001 == _x_001 + _x_002 && _x_001 == 500 + 0.2";
assertEquals(false, test(ex));
}
public void testOperatorPrecedence4() {
String ex = "_x_001 = 500.2; _x_002 = 200.9; _r_001 = 701; _r_001 == _x_001 + _x_002 || _x_001 == 500 + 0.2";
assertEquals(true, test(ex));
}
public void testOperatorPrecedence5() {
String ex = "_x_001 == _x_001 / 2 - _x_001 + _x_001 + _x_001 / 2 && _x_002 / 2 == _x_002 / 2";
Map vars = new HashMap();
vars.put("_x_001", 500.2);
vars.put("_x_002", 200.9);
vars.put("_r_001", 701);
ExpressionCompiler compiler = new ExpressionCompiler(ex);
assertEquals(true, executeExpression(compiler.compile(), vars));
}
public void testShortPathExpression() {
assertEquals(null, MVEL.eval("3 > 4 && foo.toUC('test'); foo.register", new Base(), createTestMap()));
}
public void testShortPathExpression2() {
assertEquals(true, test("4 > 3 || foo.toUC('test')"));
}
public void testShortPathExpression4() {
assertEquals(true, test("4>3||foo.toUC('test')"));
}
public void testOrOperator() {
assertEquals(true, test("true||true"));
}
public void testOrOperator2() {
assertEquals(true, test("2 > 3 || 3 > 2"));
}
public void testOrOperator3() {
assertEquals(true, test("pi > 5 || pi > 6 || pi > 3"));
}
public void testShortPathExpression3() {
assertEquals(false, test("defnull != null && defnull.length() > 0"));
}
public void testModulus() {
assertEquals(38392 % 2,
test("38392 % 2"));
}
public void testLessThan() {
assertEquals(true, test("pi < 3.15"));
assertEquals(true, test("pi <= 3.14"));
assertEquals(false, test("pi > 3.14"));
assertEquals(true, test("pi >= 3.14"));
}
public void testMethodAccess() {
assertEquals("happyBar", test("foo.happy()"));
}
public void testMethodAccess2() {
assertEquals("FUBAR", test("foo.toUC( 'fubar' )"));
}
public void testMethodAccess3() {
assertEquals(true, test("equalityCheck(c, 'cat')"));
}
public void testMethodAccess4() {
assertEquals(null, test("readBack(null)"));
}
public void testMethodAccess5() {
assertEquals("nulltest", test("appendTwoStrings(null, 'test')"));
}
public void testMethodAccess6() {
assertEquals(true, test(" equalityCheck( c \n , \n 'cat' ) "));
}
public void testNegation() {
assertEquals(true, test("!fun && !fun"));
}
public void testNegation2() {
assertEquals(false, test("fun && !fun"));
}
public void testNegation3() {
assertEquals(true, test("!(fun && fun)"));
}
public void testNegation4() {
assertEquals(false, test("(fun && fun)"));
}
public void testNegation5() {
assertEquals(true, test("!false"));
}
public void testNegation6() {
assertEquals(false, test("!true"));
}
public void testNegation7() {
assertEquals(true, test("s = false; t = !s; t"));
}
public void testNegation8() {
assertEquals(true, test("s = false; t =! s; t"));
}
public void testMultiStatement() {
assertEquals(true, test("populate(); barfoo == 'sarah'"));
}
public void testAssignment() {
assertEquals(true, test("populate(); blahfoo = 'sarah'; blahfoo == 'sarah'"));
}
public void testAssignment2() {
assertEquals("sarah", test("populate(); blahfoo = barfoo"));
}
public void testAssignment3() {
assertEquals(java.lang.Integer.class, test("blah = 5").getClass());
}
public void testAssignment4() {
assertEquals(102, test("a = 100 + 1 + 1"));
}
public void testAssignment6() {
assertEquals("blip", test("array[zero] = array[zero+1]; array[zero]"));
}
public void testOr() {
assertEquals(true, test("fun || true"));
}
public void testLiteralPassThrough() {
assertEquals(true, test("true"));
}
public void testLiteralPassThrough2() {
assertEquals(false, test("false"));
}
public void testLiteralPassThrough3() {
assertEquals(null, test("null"));
}
public void testLiteralReduction1() {
assertEquals("foo", test("null or 'foo'"));
}
public void testRegEx() {
assertEquals(true, test("foo.bar.name ~= '[a-z].+'"));
}
public void testRegExNegate() {
assertEquals(false, test("!(foo.bar.name ~= '[a-z].+')"));
}
public void testRegEx2() {
assertEquals(true, test("foo.bar.name ~= '[a-z].+' && foo.bar.name != null"));
}
public void testRegEx3() {
assertEquals(true, test("foo.bar.name~='[a-z].+'&&foo.bar.name!=null"));
}
public void testBlank() {
assertEquals(true, test("'' == empty"));
}
public void testBlank2() {
assertEquals(true, test("BWAH == empty"));
}
public void testBooleanModeOnly2() {
assertEquals(false, (Object) DataConversion.convert(test("BWAH"), Boolean.class));
}
public void testBooleanModeOnly4() {
assertEquals(true, test("hour == (hour + 0)"));
}
public void testTernary() {
assertEquals("foobie", test("zero==0?'foobie':zero"));
}
public void testTernary2() {
assertEquals("blimpie", test("zero==1?'foobie':'blimpie'"));
}
public void testTernary3() {
assertEquals("foobiebarbie", test("zero==1?'foobie':'foobie'+'barbie'"));
}
public void testStrAppend() {
assertEquals("foobarcar", test("'foo' + 'bar' + 'car'"));
}
public void testStrAppend2() {
assertEquals("foobarcar1", test("'foobar' + 'car' + 1"));
}
public void testInstanceCheck1() {
assertEquals(true, test("c is java.lang.String"));
}
public void testInstanceCheck2() {
assertEquals(false, test("pi is java.lang.Integer"));
}
public void testInstanceCheck3() {
assertEquals(true, test("foo is org.mvel.tests.core.res.Foo"));
}
public void testBitwiseOr1() {
assertEquals(6, test("2|4"));
}
public void testBitwiseOr2() {
assertEquals(true, test("(2 | 1) > 0"));
}
public void testBitwiseOr3() {
assertEquals(true, test("(2|1) == 3"));
}
public void testBitwiseOr4() {
assertEquals(2 | 5, test("2|five"));
}
public void testBitwiseAnd1() {
assertEquals(2, test("2 & 3"));
}
public void testBitwiseAnd2() {
assertEquals(5 & 3, test("five & 3"));
}
public void testShiftLeft() {
assertEquals(4, test("2 << 1"));
}
public void testShiftLeft2() {
assertEquals(5 << 1, test("five << 1"));
}
public void testUnsignedShiftLeft() {
assertEquals(2, test("-2 <<< 0"));
}
// public void testUnsignedShiftLeft2() {
// assertEquals(5, test("(five - 10) <<< 0"));
// }
public void testShiftRight() {
assertEquals(128, test("256 >> 1"));
}
public void testShiftRight2() {
assertEquals(5 >> 1, test("five >> 1"));
}
public void testUnsignedShiftRight() {
assertEquals(-5 >>> 1, test("-5 >>> 1"));
}
public void testUnsignedShiftRight2() {
assertEquals(-5 >>> 1, test("(five - 10) >>> 1"));
}
public void testXOR() {
assertEquals(3, test("1 ^ 2"));
}
public void testXOR2() {
assertEquals(5 ^ 2, test("five ^ 2"));
}
public void testContains1() {
assertEquals(true, test("list contains 'Happy!'"));
}
public void testContains2() {
assertEquals(false, test("list contains 'Foobie'"));
}
public void testContains3() {
assertEquals(true, test("sentence contains 'fox'"));
}
public void testContains4() {
assertEquals(false, test("sentence contains 'mike'"));
}
public void testContains5() {
assertEquals(true, test("!(sentence contains 'mike')"));
}
public void testContains6() {
assertEquals(true, test("bwahbwah = 'mikebrock'; testVar10 = 'mike'; bwahbwah contains testVar10"));
}
public void testInvert() {
assertEquals(~10, test("~10"));
}
public void testInvert2() {
assertEquals(~(10 + 1), test("~(10 + 1)"));
}
public void testInvert3() {
assertEquals(~10 + (1 + ~50), test("~10 + (1 + ~50)"));
}
public void testListCreation2() {
assertTrue(test("[\"test\"]") instanceof List);
}
public void testListCreation3() {
assertTrue(test("[66]") instanceof List);
}
public void testListCreation4() {
List ar = (List) test("[ 66 , \"test\" ]");
assertEquals(2, ar.size());
assertEquals(66, ar.get(0));
assertEquals("test", ar.get(1));
}
public void testListCreationWithCall() {
assertEquals(1, test("[\"apple\"].size()"));
}
public void testArrayCreationWithLength() {
assertEquals(2, test("Array.getLength({'foo', 'bar'})"));
}
public void testEmptyList() {
assertTrue(test("[]") instanceof List);
}
public void testEmptyArray() {
assertTrue(((Object[]) test("{}")).length == 0);
}
public void testEmptyArray2() {
assertTrue(((Object[]) test("{ }")).length == 0);
}
public void testArrayCreation() {
assertEquals(0, test("arrayTest = {{1, 2, 3}, {2, 1, 0}}; arrayTest[1][2]"));
}
public void testMapCreation() {
assertEquals("sarah", test("map = ['mike':'sarah','tom':'jacquelin']; map['mike']"));
}
public void testMapCreation2() {
assertEquals("sarah", test("map = ['mike' :'sarah' ,'tom' :'jacquelin' ]; map['mike']"));
}
public void testMapCreation3() {
assertEquals("foo", test("map = [1 : 'foo']; map[1]"));
}
public void testProjectionSupport() {
assertEquals(true, test("(name in things)contains'Bob'"));
}
public void testProjectionSupport1() {
assertEquals(true, test("(name in things) contains 'Bob'"));
}
public void testProjectionSupport2() {
assertEquals(3, test("(name in things).size()"));
}
public void testSizeOnInlineArray() {
assertEquals(3, test("{1,2,3}.size()"));
}
public void testSimpleListCreation() {
test("['foo', 'bar', 'foobar', 'FOOBAR']");
}
public void testStaticMethodFromLiteral() {
assertEquals(String.class.getName(), test("String.valueOf(Class.forName('java.lang.String').getName())"));
}
public void testObjectInstantiation() {
test("new java.lang.String('foobie')");
}
public void testObjectInstantiationWithMethodCall() {
assertEquals("FOOBIE", test("new String('foobie') . toUpperCase()"));
}
public void testObjectInstantiation2() {
test("new String() is String");
}
public void testObjectInstantiation3() {
test("new java.text.SimpleDateFormat('yyyy').format(new java.util.Date(System.currentTimeMillis()))");
}
public void testArrayCoercion() {
assertEquals("gonk", test("funMethod( {'gonk', 'foo'} )"));
}
public void testArrayCoercion2() {
assertEquals(10, test("sum({2,2,2,2,2})"));
}
public void testMapAccess() {
assertEquals("dog", test("funMap['foo'].bar.name"));
}
public void testMapAccess2() {
assertEquals("dog", test("funMap.foo.bar.name"));
}
public void testSoundex() {
assertTrue((Boolean) test("'foobar' soundslike 'fubar'"));
}
public void testSoundex2() {
assertFalse((Boolean) test("'flexbar' soundslike 'fubar'"));
}
public void testSoundex3() {
assertEquals(true, test("(c soundslike 'kat')"));
}
public void testSoundex4() {
assertEquals(true, test("_xx1 = 'cat'; _xx2 = 'katt'; (_xx1 soundslike _xx2)"));
}
public void testSoundex5() {
assertEquals(true, test("_type = 'fubar';_type soundslike \"foobar\""));
}
public void testSimilarity1() {
assertEquals(0.6666667f, test("c strsim 'kat'"));
}
public void testThisReference() {
assertEquals(true, test("this") instanceof Base);
}
public void testThisReference2() {
assertEquals(true, test("this.funMap") instanceof Map);
}
public void testThisReference3() {
assertEquals(true, test("this is org.mvel.tests.core.res.Base"));
}
public void testThisReference4() {
assertEquals(true, test("this.funMap instanceof java.util.Map"));
}
public void testThisReference5() {
assertEquals(true, test("this.data == 'cat'"));
}
public void testThisReferenceInMethodCall() {
assertEquals(101, test("Integer.parseInt(this.number)"));
}
public void testThisReferenceInConstructor() {
assertEquals("101", test("new String(this.number)"));
}
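    // The next three tests evaluate the same "this.foo == 'bar'" expression against a Map bound
    // to the "this" variable, first interpreted and then compiled with the reflective and ASM
    // optimizers.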
// interpreted
public void testThisReferenceMapVirtualObjects() {
Map<String, String> map = new HashMap<String, String>();
map.put("foo", "bar");
VariableResolverFactory factory = new MapVariableResolverFactory(new HashMap<String, Object>());
factory.createVariable("this", map);
assertEquals(true, eval("this.foo == 'bar'", map, factory));
}
// compiled - reflective
public void testThisReferenceMapVirtualObjects1() {
// Create our root Map object
Map<String, String> map = new HashMap<String, String>();
map.put("foo", "bar");
VariableResolverFactory factory = new MapVariableResolverFactory(new HashMap<String, Object>());
factory.createVariable("this", map);
OptimizerFactory.setDefaultOptimizer("reflective");
// Run test
assertEquals(true, executeExpression(compileExpression("this.foo == 'bar'"), map, factory));
}
// compiled - asm
public void testThisReferenceMapVirtualObjects2() {
// Create our root Map object
Map<String, String> map = new HashMap<String, String>();
map.put("foo", "bar");
VariableResolverFactory factory = new MapVariableResolverFactory(new HashMap<String, Object>());
factory.createVariable("this", map);
        // Switch to the ASM (bytecode-generating) optimizer unless JIT is disabled via -Dmvel.disable.jit=true
if (!Boolean.getBoolean("mvel.disable.jit")) OptimizerFactory.setDefaultOptimizer("ASM");
// Run test
assertEquals(true, executeExpression(compileExpression("this.foo == 'bar'"), map, factory));
}
public void testStringEscaping() {
assertEquals("\"Mike Brock\"", test("\"\\\"Mike Brock\\\"\""));
}
public void testStringEscaping2() {
assertEquals("MVEL's Parser is Fast", test("'MVEL\\'s Parser is Fast'"));
}
public void testEvalToBoolean() {
assertEquals(true, (boolean) evalToBoolean("true ", "true"));
assertEquals(true, (boolean) evalToBoolean("true ", "true"));
}
public void testCompiledMapStructures() {
executeExpression(compileExpression("['foo':'bar'] contains 'foo'"), null, null, Boolean.class);
}
public void testSubListInMap() {
assertEquals("pear", test("map = ['test' : 'poo', 'foo' : [c, 'pear']]; map['foo'][1]"));
}
public void testCompiledMethodCall() {
assertEquals(String.class, executeExpression(compileExpression("c.getClass()"), new Base(), createTestMap()));
}
public void testStaticNamespaceCall() {
assertEquals(java.util.ArrayList.class, test("java.util.ArrayList"));
}
public void testStaticNamespaceClassWithMethod() {
assertEquals("FooBar", test("java.lang.String.valueOf('FooBar')"));
}
public void testConstructor() {
assertEquals("foo", test("a = 'foobar'; new String(a.toCharArray(), 0, 3)"));
}
public void testStaticNamespaceClassWithField() {
assertEquals(Integer.MAX_VALUE, test("java.lang.Integer.MAX_VALUE"));
}
public void testStaticNamespaceClassWithField2() {
assertEquals(Integer.MAX_VALUE, test("Integer.MAX_VALUE"));
}
public void testStaticFieldAsMethodParm() {
assertEquals(String.valueOf(Integer.MAX_VALUE), test("String.valueOf(Integer.MAX_VALUE)"));
}
public void testEmptyIf() {
assertEquals(5, test("a = 5; if (a == 5) { }; return a;"));
}
public void testEmptyIf2() {
assertEquals(5, test("a=5;if(a==5){};return a;"));
}
public void testIf() {
assertEquals(10, test("if (5 > 4) { return 10; } else { return 5; }"));
}
public void testIf2() {
assertEquals(10, test("if (5 < 4) { return 5; } else { return 10; }"));
}
public void testIf3() {
assertEquals(10, test("if(5<4){return 5;}else{return 10;}"));
}
public void testIfAndElse() {
assertEquals(true, test("if (false) { return false; } else { return true; }"));
}
public void testIfAndElseif() {
assertEquals(true, test("if (false) { return false; } else if(100 < 50) { return false; } else if (10 > 5) return true;"));
}
public void testIfAndElseIfCondensedGrammar() {
assertEquals("Foo", test("if (false) return 'Bar'; else return 'Foo';"));
}
public void testForEach2() {
assertEquals(6, test("total = 0; a = {1,2,3}; foreach(item : a) { total += item }; total"));
}
public void testForEach3() {
assertEquals(true, test("a = {1,2,3}; foreach (i : a) { if (i == 1) { return true; } }"));
}
public void testForEach4() {
assertEquals("OneTwoThreeFour", test("a = {1,2,3,4}; builder = ''; foreach (i : a) {" +
" if (i == 1) { builder += 'One' } else if (i == 2) { builder += 'Two' } " +
"else if (i == 3) { builder += 'Three' } else { builder += 'Four' }" +
"}; builder;"));
}
public void testWith() {
assertEquals("OneTwo", test("with (foo) {aValue = 'One',bValue='Two'}; foo.aValue + foo.bValue;"));
}
public void testWith2() {
assertEquals("OneTwo", test(
"with (foo) { \n" +
"aValue = 'One', // this is a comment \n" +
"bValue='Two' // this is also a comment \n" +
"}; \n" +
"foo.aValue + foo.bValue;"));
}
public void testMagicArraySize() {
assertEquals(5, test("stringArray.size()"));
}
public void testMagicArraySize2() {
assertEquals(5, test("intArray.size()"));
}
public void testStaticVarAssignment() {
assertEquals("1", test("String mikeBrock = 1; mikeBrock"));
}
public void testImport() {
assertEquals(HashMap.class, test("import java.util.HashMap; HashMap;"));
}
public void testStaticImport() {
assertEquals(2.0, test("import_static java.lang.Math.sqrt; sqrt(4)"));
}
public void testFunctionPointer() {
assertEquals(2.0, test("squareRoot = java.lang.Math.sqrt; squareRoot(4)"));
}
public void testFunctionPointerAsParam() {
assertEquals("2.0", test("squareRoot = Math.sqrt; new String(String.valueOf(squareRoot(4)));"));
}
public void testFunctionPointerInAssignment() {
assertEquals(5.0, test("squareRoot = Math.sqrt; i = squareRoot(25); return i;"));
}
public void testIncrementOperator() {
assertEquals(2, test("x = 1; x++; x"));
}
public void testPreIncrementOperator() {
assertEquals(2, test("x = 1; ++x"));
}
public void testDecrementOperator() {
assertEquals(1, test("x = 2; x--; x"));
}
public void testPreDecrementOperator() {
assertEquals(1, test("x = 2; --x"));
}
public void testQualifiedStaticTyping() {
assertEquals(20, test("java.math.BigDecimal a = new java.math.BigDecimal( 10.0 ); java.math.BigDecimal b = new java.math.BigDecimal( 10.0 ); java.math.BigDecimal c = a + b; return c; "));
}
public void testUnQualifiedStaticTyping() {
CompiledExpression ce = (CompiledExpression) compileExpression("import java.math.BigDecimal; BigDecimal a = new BigDecimal( 10.0 ); BigDecimal b = new BigDecimal( 10.0 ); BigDecimal c = a + b; return c; ");
System.out.println(DebugTools.decompile(ce));
assertEquals(20, testCompiledSimple("import java.math.BigDecimal; BigDecimal a = new BigDecimal( 10.0 ); BigDecimal b = new BigDecimal( 10.0 ); BigDecimal c = a + b; return c; ", new HashMap()));
}
public void testObjectCreation() {
assertEquals(6, test("new Integer( 6 )"));
}
public void testTernary4() {
assertEquals("<test>", test("true ? '<test>' : '<poo>'"));
}
public void testStringAsCollection() {
assertEquals('o', test("abc = 'foo'; abc[1]"));
}
public void testSubExpressionIndexer() {
assertEquals("bar", test("xx = new java.util.HashMap(); xx.put('foo', 'bar'); prop = 'foo'; xx[prop];"));
}
public void testCompileTimeLiteralReduction() {
assertEquals(1000, test("10 * 100"));
}
public void testInterfaceResolution() {
Serializable ex = compileExpression("foo.collectionTest.size()");
Map map = createTestMap();
Foo foo = (Foo) map.get("foo");
foo.setCollectionTest(new HashSet());
Object result1 = executeExpression(ex, foo, map);
foo.setCollectionTest(new ArrayList());
Object result2 = executeExpression(ex, foo, map);
assertEquals(result1, result2);
}
/**
* Start collections framework based compliance tests
*/
public void testCreationOfSet() {
assertEquals("foo bar foo bar",
test("set = new java.util.LinkedHashSet(); " +
"set.add('foo');" +
"set.add('bar');" +
"output = '';" +
"foreach (item : set) {" +
"output = output + item + ' ';" +
"} " +
"foreach (item : set) {" +
"output = output + item + ' ';" +
"} " +
"output = output.trim();" +
"if (set.size() == 2) { return output; }"));
}
public void testCreationOfList() {
assertEquals(5, test("l = new java.util.LinkedList();" +
"l.add('fun');" +
"l.add('happy');" +
"l.add('fun');" +
"l.add('slide');" +
"l.add('crap');" +
"poo = new java.util.ArrayList(l);" +
"poo.size();"));
}
public void testMapOperations() {
assertEquals("poo5", test(
"l = new java.util.ArrayList();" +
"l.add('plop');" +
"l.add('poo');" +
"m = new java.util.HashMap();" +
"m.put('foo', l);" +
"m.put('cah', 'mah');" +
"m.put('bar', 'foo');" +
"m.put('sarah', 'mike');" +
"m.put('edgar', 'poe');" +
"" +
"if (m.edgar == 'poe') {" +
"return m.foo[1] + m.size();" +
"}"));
}
public void testStackOperations() {
assertEquals(10, test(
"stk = new java.util.Stack();" +
"stk.push(5);" +
"stk.push(5);" +
"stk.pop() + stk.pop();"
));
}
public void testSystemOutPrint() {
test("a = 0;\r\nSystem.out.println('This is a test');");
}
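    // The breakpoint tests below compile with debug symbols and a source file name, register
    // breakpoints against that file and line via MVELRuntime.registerBreakpoint, install a
    // thread Debugger whose onBreak callback decides whether to resume or step, and then run
    // the expression through MVEL.executeDebugger.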
public void testBreakpoints() {
ExpressionCompiler compiler = new ExpressionCompiler("a = 5;\nb = 5;\n\nif (a == b) {\n\nSystem.out.println('Good');\nreturn a + b;\n}\n");
System.out.println("-------\n" + compiler.getExpression() + "\n-------\n");
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test.mv", 7);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
return 0;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
assertEquals(10, MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testBreakpoints2() {
ExpressionCompiler compiler = new ExpressionCompiler("System.out.println('test the debugger');\n a = 0;");
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
System.out.println(DebugTools.decompile(compiled));
}
public void testBreakpoints3() {
String expr = "System.out.println( \"a1\" );\n" +
"System.out.println( \"a2\" );\n" +
"System.out.println( \"a3\" );\n" +
"System.out.println( \"a4\" );\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.addImport("System", System.class);
context.setStrictTypeEnforcement(true);
context.setDebugSymbols(true);
context.setSourceFile("mysource");
// Serializable compiledExpression = compiler.compile(context);
String s = org.mvel.debug.DebugTools.decompile(compiler.compile(context));
System.out.println("output: " + s);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(4, count);
}
public void testBreakpointsAcrossWith() {
String line1 = "System.out.println( \"a1\" );\n";
String line2 = "c = new Cheese();\n";
String line3 = "with ( c ) { type = 'cheddar',\n" +
" price = 10 };\n";
String line4 = "System.out.println( \"a1\" );\n";
String expr = line1 + line2 + line3 + line4;
System.out.println(expr);
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.addImport("System", System.class);
context.addImport("Cheese", Cheese.class);
context.setStrictTypeEnforcement(true);
context.setDebugSymbols(true);
context.setSourceFile("mysource");
// Serializable compiledExpression = compiler.compile(context);
String s = org.mvel.debug.DebugTools.decompile(compiler.compile(context));
System.out.println("output: " + s);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(5, count);
}
public void testBreakpointsAcrossComments() {
String expression = "/** This is a comment\n" + // 1
" * Second comment line\n" + // 2
" * Third Comment Line\n" + // 3
" */\n" + // 4
"System.out.println('4');\n" + // 5
"System.out.println('5');\n" + // 6
"a = 0;\n" + // 7
"b = 1;\n" + // 8
"a + b"; // 9
ExpressionCompiler compiler = new ExpressionCompiler(expression);
compiler.setDebugSymbols(true);
System.out.println("Expression:\n------------");
System.out.println(expression);
System.out.println("------------");
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test2.mv");
CompiledExpression compiled = compiler.compile(ctx);
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test2.mv", 9);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint Encountered [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
System.out.println("vars:" + frame.getFactory().getKnownVariables());
System.out.println("Resume Execution");
return 0;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
assertEquals(1, MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testBreakpointsAcrossComments2() {
ExpressionCompiler compiler = new ExpressionCompiler(
"// This is a comment\n" + // 1
"//Second comment line\n" + // 2
"//Third Comment Line\n" + // 3
"\n" + // 4
"//Test\n" + // 5
"System.out.println('4');\n" + // 6
"//System.out.println('5'); \n" + // 7
"a = 0;\n" + // 8
"b = 1;\n" + // 9
" a + b"); // 10
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test2.mv");
CompiledExpression compiled = compiler.compile(ctx);
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test2.mv", 6);
MVELRuntime.registerBreakpoint("test2.mv", 8);
MVELRuntime.registerBreakpoint("test2.mv", 9);
MVELRuntime.registerBreakpoint("test2.mv", 10);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
return 0;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
assertEquals(1, MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testBreakpoints4() {
String expression = "System.out.println('foo');\n" +
"a = new Foo();\n" +
"update (a) { name = 'bar' };\n" +
"System.out.println('name:' + a.name);\n" +
"return a.name;";
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
Map<String, Macro> macros = new HashMap<String, Macro>();
interceptors.put("Update", new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
((WithNode) node).getNestedStatement().getValue(null,
factory);
System.out.println("fired update interceptor -- before");
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
System.out.println("fired update interceptor -- after");
return 0;
}
});
macros.put("update", new Macro() {
public String doMacro() {
return "@Update with";
}
});
expression = parseMacros(expression, macros);
ExpressionCompiler compiler = new ExpressionCompiler(expression);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test2.mv");
ctx.addImport("Foo", Foo.class);
ctx.setInterceptors(interceptors);
CompiledExpression compiled = compiler.compile(ctx);
System.out.println("\nExpression:------------");
System.out.println(expression);
System.out.println("------------");
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test2.mv", 3);
MVELRuntime.registerBreakpoint("test2.mv", 4);
MVELRuntime.registerBreakpoint("test2.mv", 5);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
return 0;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
assertEquals("bar", MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testBreakpoints5() {
String expression = "System.out.println('foo');\r\n" +
"a = new Foo();\r\n" +
"a.name = 'bar';\r\n" +
"foo.happy();\r\n" +
"System.out.println( 'name:' + a.name ); \r\n" +
"System.out.println( 'name:' + a.name ); \r\n" +
"System.out.println( 'name:' + a.name ); \r\n" +
"return a.name;";
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
Map<String, Macro> macros = new HashMap<String, Macro>();
interceptors.put("Update", new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
((WithNode) node).getNestedStatement().getValue(null,
factory);
System.out.println("fired update interceptor -- before");
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
System.out.println("fired update interceptor -- after");
return 0;
}
});
macros.put("update", new Macro() {
public String doMacro() {
return "@Update with";
}
});
expression = parseMacros(expression, macros);
ExpressionCompiler compiler = new ExpressionCompiler(expression);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test2.mv");
ctx.addImport("Foo", Foo.class);
ctx.setInterceptors(interceptors);
CompiledExpression compiled = compiler.compile(ctx);
System.out.println("\nExpression:------------");
System.out.println(expression);
System.out.println("------------");
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test2.mv", 1);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
return Debugger.STEP_OVER;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
System.out.println("\n==RUN==\n");
assertEquals("bar", MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testDebugSymbolsWithWindowsLinedEndings() throws Exception {
String expr = " System.out.println( \"a1\" );\r\n" +
" System.out.println( \"a2\" );\r\n" +
" System.out.println( \"a3\" );\r\n" +
" System.out.println( \"a4\" );\r\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.setDebugSymbols(true);
ctx.setSourceFile("mysource");
// Serializable compiledExpression = compiler.compile(ctx);
String s = org.mvel.debug.DebugTools.decompile(compiler.compile(ctx));
System.out.println(s);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(4, count);
}
public void testDebugSymbolsWithUnixLinedEndings() throws Exception {
String expr = " System.out.println( \"a1\" );\n" +
" System.out.println( \"a2\" );\n" +
" System.out.println( \"a3\" );\n" +
" System.out.println( \"a4\" );\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.setDebugSymbols(true);
ctx.setSourceFile("mysource");
// Serializable compiledExpression = compiler.compile(ctx);
String s = org.mvel.debug.DebugTools.decompile(compiler.compile(ctx));
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(4, count);
}
public void testDebugSymbolsWithMixedLinedEndings() throws Exception {
String expr = " System.out.println( \"a1\" );\n" +
" System.out.println( \"a2\" );\r\n" +
" System.out.println( \"a3\" );\n" +
" System.out.println( \"a4\" );\r\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.setDebugSymbols(true);
ctx.setSourceFile("mysource");
// Serializable compiledExpression = compiler.compile(ctx);
String s = org.mvel.debug.DebugTools.decompile(compiler.compile(ctx));
System.out.println(s);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(4, count);
}
public void testReflectionCache() {
assertEquals("happyBar", test("foo.happy(); foo.bar.happy()"));
}
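    // The var-input tests compile an expression and then inspect the retained ParserContext:
    // names that are only read appear as inputs, while names assigned within the expression
    // (optionally with a declared type such as "String bleh") appear as variables with a
    // resolvable type.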
public void testVarInputs() {
ExpressionCompiler compiler = new ExpressionCompiler("test != foo && bo.addSomething(trouble); String bleh = foo; twa = bleh;");
compiler.compile();
ParserContext pCtx = compiler.getParserContextState();
assertEquals(4, pCtx.getInputs().size());
assertTrue(pCtx.getInputs().containsKey("test"));
assertTrue(pCtx.getInputs().containsKey("foo"));
assertTrue(pCtx.getInputs().containsKey("bo"));
assertTrue(pCtx.getInputs().containsKey("trouble"));
assertEquals(2, pCtx.getVariables().size());
assertTrue(pCtx.getVariables().containsKey("bleh"));
assertTrue(pCtx.getVariables().containsKey("twa"));
assertEquals(String.class, pCtx.getVarOrInputType("bleh"));
}
public void testVarInputs2() {
ExpressionCompiler compiler = new ExpressionCompiler("test != foo && bo.addSomething(trouble); String bleh = foo; twa = bleh;");
ParserContext ctx = new ParserContext();
ctx.setRetainParserState(true);
compiler.compile(ctx);
System.out.println(ctx.getVarOrInputType("bleh"));
}
public void testVarInputs3() {
ExpressionCompiler compiler = new ExpressionCompiler("addresses['home'].street");
compiler.compile();
assertFalse(compiler.getParserContextState().getInputs().keySet().contains("home"));
}
public void testVarInputs4() {
ExpressionCompiler compiler = new ExpressionCompiler("System.out.println( message );");
compiler.compile();
assertTrue(compiler.getParserContextState().getInputs().keySet().contains("message"));
}
public void testAnalyzer() {
ExpressionCompiler compiler = new ExpressionCompiler("order.id == 10");
compiler.compile();
for (String input : compiler.getParserContextState().getInputs().keySet()) {
System.out.println("input>" + input);
}
assertEquals(1, compiler.getParserContextState().getInputs().size());
assertTrue(compiler.getParserContextState().getInputs().containsKey("order"));
}
public void testClassImportViaFactory() {
MapVariableResolverFactory mvf = new MapVariableResolverFactory(createTestMap());
ClassImportResolverFactory classes = new ClassImportResolverFactory();
classes.addClass(HashMap.class);
ResolverTools.appendFactory(mvf, classes);
// Serializable compiled = compileExpression("HashMap map = new HashMap()", classes.getImportedClasses());
assertTrue(executeExpression(
compileExpression("HashMap map = new HashMap()", classes.getImportedClasses()), mvf) instanceof HashMap);
}
public void testSataticClassImportViaFactory() {
MapVariableResolverFactory mvf = new MapVariableResolverFactory(createTestMap());
ClassImportResolverFactory classes = new ClassImportResolverFactory();
classes.addClass(Person.class);
ResolverTools.appendFactory(mvf, classes);
// Serializable compiled = compileExpression("p = new Person('tom'); return p.name;", classes.getImportedClasses());
assertEquals("tom",
executeExpression(compileExpression("p = new Person('tom'); return p.name;",
classes.getImportedClasses()), mvf));
}
public void testSataticClassImportViaFactoryAndWithModification() {
MapVariableResolverFactory mvf = new MapVariableResolverFactory(createTestMap());
ClassImportResolverFactory classes = new ClassImportResolverFactory();
classes.addClass(Person.class);
ResolverTools.appendFactory(mvf, classes);
// Serializable compiled = compileExpression("p = new Person('tom'); p.age = 20; with( p ) { age = p.age + 1 }; return p.age;", classes.getImportedClasses());
assertEquals(21, executeExpression(compileExpression(
"p = new Person('tom'); p.age = 20; with( p ) { age = p.age + 1 }; return p.age;",
classes.getImportedClasses()), mvf));
}
public void testCheeseConstructor() {
MapVariableResolverFactory mvf = new MapVariableResolverFactory(createTestMap());
ClassImportResolverFactory classes = new ClassImportResolverFactory();
classes.addClass(Cheese.class);
ResolverTools.appendFactory(mvf, classes);
// Serializable compiled = compileExpression("cheese = new Cheese(\"cheddar\", 15);", classes.getImportedClasses());
assertTrue(executeExpression(
compileExpression("cheese = new Cheese(\"cheddar\", 15);", classes.getImportedClasses()), mvf) instanceof Cheese);
}
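// Verifies that the registered @test interceptor's doBefore/doAfter hooks are invoked around the annotated statement.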
public void testInterceptors() {
Interceptor testInterceptor = new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
System.out.println("BEFORE Node: " + node.getName());
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
System.out.println("AFTER Node: " + node.getName());
return 0;
}
};
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
interceptors.put("test", testInterceptor);
// Serializable compiled = compileExpression("@test System.out.println('MIDDLE');", null, interceptors);
executeExpression(compileExpression("@test System.out.println('MIDDLE');", null, interceptors));
}
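// The 'modify' macro expands to '@Modify with'; the Modify interceptor evaluates the with-block and then defines
// the 'mod' variable, which the expanded script returns.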
public void testMacroSupport() {
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("foo", new Foo());
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
Map<String, Macro> macros = new HashMap<String, Macro>();
interceptors.put("Modify", new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
((WithNode) node).getNestedStatement().getValue(null,
factory);
factory.createVariable("mod", "FOOBAR!");
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
return 0;
}
});
macros.put("modify", new Macro() {
public String doMacro() {
return "@Modify with";
}
});
ExpressionCompiler compiler = new ExpressionCompiler(parseMacros("modify (foo) { aValue = 'poo' }; mod", macros));
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext(null, interceptors, null);
ctx.setSourceFile("test.mv");
// CompiledExpression compiled = compiler.compile(ctx);
assertEquals("FOOBAR!", executeExpression(compiler.compile(ctx), null, vars));
}
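// Same macro/interceptor setup as above, but compiled with debug symbols against source file 'test.mv';
// a breakpoint is registered on line 3 and the thread debugger simply steps through each frame.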
public void testMacroSupportWithDebugging() {
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("foo", new Foo());
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
Map<String, Macro> macros = new HashMap<String, Macro>();
interceptors.put("Modify", new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
((WithNode) node).getNestedStatement().getValue(null,
factory);
factory.createVariable("mod", "FOOBAR!");
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
return 0;
}
});
macros.put("modify", new Macro() {
public String doMacro() {
return "@Modify with";
}
});
ExpressionCompiler compiler = new ExpressionCompiler(
parseMacros(
"System.out.println('hello');\n" +
"System.out.println('bye');\n" +
"modify (foo) { aValue = 'poo', \n" +
" aValue = 'poo' };\n mod", macros)
);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext(null, interceptors, null);
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
MVELRuntime.setThreadDebugger(new Debugger() {
public int onBreak(Frame frame) {
System.out.println(frame.getSourceName() + ":" + frame.getLineNumber());
return Debugger.STEP;
}
});
MVELRuntime.registerBreakpoint("test.mv", 3);
System.out.println(DebugTools.decompile(compiled));
assertEquals("FOOBAR!", MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(vars)));
}
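// Regression check: a compiled with-block that coerces a Long ($value) into the int 'countTest' property
// must stay executable on repeated runs.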
public void testExecuteCoercionTwice() {
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("foo", new Foo());
vars.put("$value", new Long(5));
ExpressionCompiler compiler = new ExpressionCompiler("with (foo) { countTest = $value };");
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
executeExpression(compiled, null, vars);
executeExpression(compiled, null, vars);
}
public void testComments() {
assertEquals(10, test("// This is a comment\n5 + 5"));
}
public void testComments2() {
assertEquals(20, test("10 + 10; // This is a comment"));
}
public void testComments3() {
assertEquals(30, test("/* This is a test of\r\n" +
"MVEL's support for\r\n" +
"multi-line comments\r\n" +
"*/\r\n 15 + 15"));
}
public void testComments4() {
assertEquals(((10 + 20) * 2) - 10, test("/** This is a fun test script **/\r\n" +
"a = 10;\r\n" +
"/**\r\n" +
"* Here is a useful variable\r\n" +
"*/\r\n" +
"b = 20; // set b to '20'\r\n" +
"return ((a + b) * 2) - 10;\r\n" +
"// last comment\n"));
}
public void testSubtractNoSpace1() {
assertEquals(59, test("hour-1"));
}
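// With strict type enforcement, the two unresolvable references (a.foo and b.foo) must each be reported,
// yielding a CompileException carrying exactly two errors.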
public void testStrictTypingCompilation() {
ExpressionCompiler compiler = new ExpressionCompiler("a.foo;\nb.foo;\n x = 5");
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
try {
compiler.compile(ctx);
}
catch (CompileException e) {
e.printStackTrace();
assertEquals(2, e.getErrors().size());
return;
}
fail("compilation should have failed with two errors");
}
public void testStrictStaticMethodCall() {
ExpressionCompiler compiler = new ExpressionCompiler("Bar.staticMethod()");
ParserContext ctx = new ParserContext();
ctx.addImport("Bar", Bar.class);
ctx.setStrictTypeEnforcement(true);
Serializable s = compiler.compile(ctx);
DebugTools.decompile(s);
assertEquals(1, executeExpression(s));
}
public void testStrictTypingCompilation2() throws Exception {
ParserContext ctx = new ParserContext();
//noinspection RedundantArrayCreation
ctx.addImport("getRuntime", new MethodStub(Runtime.class.getMethod("getRuntime", new Class[]{})));
ctx.setStrictTypeEnforcement(true);
ExpressionCompiler compiler = new ExpressionCompiler("getRuntime()");
StaticMethodImportResolverFactory si = new StaticMethodImportResolverFactory(ctx);
Serializable expression = compiler.compile(ctx);
serializationTest(expression);
assertTrue(executeExpression(expression, si) instanceof Runtime);
}
public void testStrictTypingCompilation3() throws NoSuchMethodException {
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ExpressionCompiler compiler =
new ExpressionCompiler("message='Hello';b=7;\nSystem.out.println(message + ';' + b);\n" +
"System.out.println(message + ';' + b); b");
assertEquals(7, executeExpression(compiler.compile(ctx), new DefaultLocalVariableResolverFactory()));
}
public void testStrictTypingCompilation4() throws NoSuchMethodException {
ParserContext ctx = new ParserContext();
ctx.addImport(Foo.class);
ctx.setStrictTypeEnforcement(true);
ExpressionCompiler compiler =
new ExpressionCompiler("x_a = new Foo()");
compiler.compile(ctx);
assertEquals(Foo.class, ctx.getVariables().get("x_a"));
}
public void testProvidedExternalTypes() {
ExpressionCompiler compiler = new ExpressionCompiler("foo.bar");
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.addInput("foo", Foo.class);
compiler.compile(ctx);
}
public void testEqualityRegression() {
ExpressionCompiler compiler = new ExpressionCompiler("price == (new Integer( 5 ) + 5 ) ");
compiler.compile();
}
public void testEvaluationRegression() {
ExpressionCompiler compiler = new ExpressionCompiler("(p.age * 2)");
compiler.compile();
assertTrue(compiler.getParserContextState().getInputs().containsKey("p"));
}
public void testAssignmentRegression() {
ExpressionCompiler compiler = new ExpressionCompiler("total = total + $cheese.price");
compiler.compile();
}
public void testTypeRegression() {
ExpressionCompiler compiler = new ExpressionCompiler("total = 0");
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
compiler.compile(ctx);
assertEquals(Integer.class,
compiler.getParserContextState().getVarOrInputType("total"));
}
public void testDateComparison() {
assertTrue((Boolean) test("dt1 < dt2"));
}
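// The same compiled accessor is executed against two different context types (a Foo, then its Bar),
// exercising dynamic de-optimisation and re-optimisation of the access path.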
public void testDynamicDeop() {
Serializable s = compileExpression("name");
assertEquals("dog", executeExpression(s, new Foo()));
assertEquals("dog", executeExpression(s, new Foo().getBar()));
}
public void testVirtProperty() {
// OptimizerFactory.setDefaultOptimizer("ASM");
Map<String, Object> testMap = new HashMap<String, Object>();
testMap.put("test", "foo");
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("mp", testMap);
assertEquals("bar", executeExpression(compileExpression("mp.test = 'bar'; mp.test"), vars));
}
public void testMapPropertyCreateCondensed() {
assertEquals("foo", test("map = new java.util.HashMap(); map['test'] = 'foo'; map['test'];"));
}
public void testClassLiteral() {
assertEquals(String.class, test("java.lang.String"));
}
public void testDeepMethod() {
assertEquals(false, test("foo.bar.testList.add(new String()); foo.bar.testList == empty"));
}
public void testArrayAccessorAssign() {
assertEquals("foo", test("a = {'f00', 'bar'}; a[0] = 'foo'; a[0]"));
}
public void testListAccessorAssign() {
assertEquals("bar", test("a = new java.util.ArrayList(); a.add('foo'); a.add('BAR'); a[1] = 'bar'; a[1]"));
}
public void testBracketInString() {
test("System.out.println('1)your guess was:');");
}
public void testNesting() {
assertEquals("foo", test("new String(new String(new String(\"foo\")));"));
}
public void testDeepPropertyAdd() {
assertEquals(10, test("foo.countTest+ 10"));
}
public void testDeepAssignmentIncrement() {
assertEquals(true, test("foo.countTest += 5; if (foo.countTest == 5) { foo.countTest = 0; return true; } else { foo.countTest = 0; return false; }"));
}
public void testDeepAssignmentWithBlock() {
assertEquals(true, test("with (foo) { countTest += 5 }; if (foo.countTest == 5) { foo.countTest = 0; return true; } else { foo.countTest = 0; return false; }"));
}
public void testTypeCast() {
assertEquals("10", test("(String) 10"));
}
public void testMapAccessSemantics() {
Map<String, Object> outermap = new HashMap<String, Object>();
Map<String, Object> innermap = new HashMap<String, Object>();
innermap.put("test", "foo");
outermap.put("innermap", innermap);
assertEquals("foo", testCompiledSimple("innermap['test']", outermap, null));
}
public void testMapBindingSemantics() {
Map<String, Object> outermap = new HashMap<String, Object>();
Map<String, Object> innermap = new HashMap<String, Object>();
innermap.put("test", "foo");
outermap.put("innermap", innermap);
MVEL.setProperty(outermap, "innermap['test']", "bar");
assertEquals("bar", testCompiledSimple("innermap['test']", outermap, null));
}
public void testMapNestedInsideList() {
ParserContext ctx = new ParserContext();
ctx.addImport("User", User.class);
ExpressionCompiler compiler = new ExpressionCompiler("users = [ 'darth' : new User('Darth', 'Vadar'),\n'bobba' : new User('Bobba', 'Feta') ]; [ users.get('darth'), users.get('bobba') ]");
Serializable s = compiler.compile(ctx);
List list = (List) executeExpression(s);
User user = (User) list.get(0);
assertEquals("Darth", user.getFirstName());
user = (User) list.get(1);
assertEquals("Bobba", user.getFirstName());
compiler = new ExpressionCompiler("users = [ 'darth' : new User('Darth', 'Vadar'),\n'bobba' : new User('Bobba', 'Feta') ]; [ users['darth'], users['bobba'] ]");
s = compiler.compile(ctx);
list = (List) executeExpression(s);
user = (User) list.get(0);
assertEquals("Darth", user.getFirstName());
user = (User) list.get(1);
assertEquals("Bobba", user.getFirstName());
}
public void testListNestedInsideList() {
ParserContext ctx = new ParserContext();
ctx.addImport("User", User.class);
ExpressionCompiler compiler = new ExpressionCompiler("users = [ new User('Darth', 'Vadar'), new User('Bobba', 'Feta') ]; [ users.get( 0 ), users.get( 1 ) ]");
Serializable s = compiler.compile(ctx);
List list = (List) executeExpression(s);
User user = (User) list.get(0);
assertEquals("Darth", user.getFirstName());
user = (User) list.get(1);
assertEquals("Bobba", user.getFirstName());
compiler = new ExpressionCompiler("users = [ new User('Darth', 'Vadar'), new User('Bobba', 'Feta') ]; [ users[0], users[1] ]");
s = compiler.compile(ctx);
list = (List) executeExpression(s);
user = (User) list.get(0);
assertEquals("Darth", user.getFirstName());
user = (User) list.get(1);
assertEquals("Bobba", user.getFirstName());
}
public void testSetSemantics() {
Bar bar = new Bar();
Foo foo = new Foo();
assertEquals("dog", MVEL.getProperty("name", bar));
assertEquals("dog", MVEL.getProperty("name", foo));
}
public void testMapBindingSemantics2() {
Map<String, Object> outermap = new HashMap<String, Object>();
Map<String, Object> innermap = new HashMap<String, Object>();
innermap.put("test", "foo");
outermap.put("innermap", innermap);
Serializable s = MVEL.compileSetExpression("innermap['test']");
MVEL.executeSetExpression(s, outermap, "bar");
assertEquals("bar", testCompiledSimple("innermap['test']", outermap, null));
}
public void testDynamicImports() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("java.util");
ExpressionCompiler compiler = new ExpressionCompiler("HashMap");
Serializable s = compiler.compile(ctx);
assertEquals(HashMap.class, executeExpression(s));
compiler = new ExpressionCompiler("map = new HashMap(); map.size()");
s = compiler.compile(ctx);
assertEquals(0, executeExpression(s, new DefaultLocalVariableResolverFactory()));
}
public void testDynamicImportsInList() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ExpressionCompiler compiler = new ExpressionCompiler("[ new User('Bobba', 'Feta') ]");
Serializable s = compiler.compile(ctx);
List list = (List) executeExpression(s);
User user = (User) list.get(0);
assertEquals("Bobba", user.getFirstName());
}
public void testDynamicImportsInMap() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ExpressionCompiler compiler = new ExpressionCompiler("[ 'bobba' : new User('Bobba', 'Feta') ]");
Serializable s = compiler.compile(ctx);
Map map = (Map) executeExpression(s);
User user = (User) map.get("bobba");
assertEquals("Bobba", user.getFirstName());
}
public void testDynamicImportsOnNestedExpressions() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ExpressionCompiler compiler = new ExpressionCompiler("new Cheesery(\"bobbo\", new Cheese(\"cheddar\", 15))");
Serializable s = compiler.compile(ctx);
Cheesery p1 = new Cheesery("bobbo", new Cheese("cheddar", 15));
Cheesery p2 = (Cheesery) executeExpression(s, new DefaultLocalVariableResolverFactory());
assertEquals(p1, p2);
}
public void testDynamicImportsWithNullConstructorParam() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ExpressionCompiler compiler = new ExpressionCompiler("new Cheesery(\"bobbo\", null)");
Serializable s = compiler.compile(ctx);
Cheesery p1 = new Cheesery("bobbo", null);
Cheesery p2 = (Cheesery) executeExpression(s, new DefaultLocalVariableResolverFactory());
assertEquals(p1, p2);
}
public void testDynamicImportsWithIdentifierSameAsClassWithDiffCase() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ctx.setStrictTypeEnforcement(false);
ExpressionCompiler compiler = new ExpressionCompiler("bar.add(\"hello\")");
compiler.compile(ctx);
}
public void testTypedAssignment() {
assertEquals("foobar", test("java.util.Map map = new java.util.HashMap(); map.put('conan', 'foobar'); map['conan'];"));
}
public void testFQCNwithStaticInList() {
assertEquals(Integer.MIN_VALUE, test("list = [java.lang.Integer.MIN_VALUE]; list[0]"));
}
public void testPrecedenceOrder() {
assertTrue((Boolean) test("5 > 6 && 2 < 1 || 10 > 9"));
}
public void testPrecedenceOrder1() {
String ex = "50 > 60 && 20 < 10 || 100 > 90";
System.out.println("Expression: " + ex);
assertTrue((Boolean) MVEL.eval(ex));
}
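// A single compiled expression is evaluated repeatedly while the 'a' binding is swapped between fresh Base
// instances whose funMap holds different values, so the cached accessor must not pin the first result.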
@SuppressWarnings({"unchecked"})
public void testDifferentImplSameCompile() {
Serializable compiled = compileExpression("a.funMap.hello");
Map testMap = new HashMap();
for (int i = 0; i < 100; i++) {
Base b = new Base();
b.funMap.put("hello", "dog");
testMap.put("a", b);
assertEquals("dog", executeExpression(compiled, testMap));
b = new Base();
b.funMap.put("hello", "cat");
testMap.put("a", b);
assertEquals("cat", executeExpression(compiled, testMap));
}
}
@SuppressWarnings({"unchecked"})
public void testInterfaceMethodCallWithSpace() {
Serializable compiled = compileExpression("drools.retract (cheese)");
Map map = new HashMap();
DefaultKnowledgeHelper helper = new DefaultKnowledgeHelper();
map.put("drools", helper);
Cheese cheese = new Cheese("stilton", 15);
map.put("cheese", cheese);
executeExpression(compiled, map);
assertSame(cheese, helper.retracted.get(0));
}
@SuppressWarnings({"unchecked"})
public void testInterfaceMethodCallWithMacro() {
Map macros = new HashMap(1);
macros.put("retract",
new Macro() {
public String doMacro() {
return "drools.retract";
}
});
Serializable compiled = compileExpression(parseMacros("retract(cheese)", macros));
Map map = new HashMap();
DefaultKnowledgeHelper helper = new DefaultKnowledgeHelper();
map.put("drools", helper);
Cheese cheese = new Cheese("stilton", 15);
map.put("cheese", cheese);
executeExpression(compiled, map);
assertSame(cheese, helper.retracted.get(0));
}
@SuppressWarnings({"UnnecessaryBoxing"})
public void testToList() {
String text = "misc.toList(foo.bar.name, 'hello', 42, ['key1' : 'value1', c : [ foo.bar.age, 'car', 42 ]], [42, [c : 'value1']] )";
List list = (List) test(text);
assertSame("dog", list.get(0));
assertEquals("hello", list.get(1));
assertEquals(new Integer(42), list.get(2));
Map map = (Map) list.get(3);
assertEquals("value1", map.get("key1"));
List nestedList = (List) map.get("cat");
assertEquals(14, nestedList.get(0));
assertEquals("car", nestedList.get(1));
assertEquals(42, nestedList.get(2));
nestedList = (List) list.get(4);
assertEquals(42, nestedList.get(0));
map = (Map) nestedList.get(1);
assertEquals("value1", map.get("cat"));
}
@SuppressWarnings({"UnnecessaryBoxing"})
public void testToListStrictMode() {
String text = "misc.toList(foo.bar.name, 'hello', 42, ['key1' : 'value1', c : [ foo.bar.age, 'car', 42 ]], [42, [c : 'value1']] )";
ParserContext ctx = new ParserContext();
ctx.addInput("misc", MiscTestClass.class);
ctx.addInput("foo", Foo.class);
ctx.addInput("c", String.class);
ctx.setStrictTypeEnforcement(true);
ExpressionCompiler compiler = new ExpressionCompiler(text);
Serializable expr = compiler.compile(ctx);
List list = (List) executeExpression(expr, createTestMap());
assertSame("dog", list.get(0));
assertEquals("hello", list.get(1));
assertEquals(new Integer(42), list.get(2));
Map map = (Map) list.get(3);
assertEquals("value1", map.get("key1"));
List nestedList = (List) map.get("cat");
assertEquals(14, nestedList.get(0));
assertEquals("car", nestedList.get(1));
assertEquals(42, nestedList.get(2));
nestedList = (List) list.get(4);
assertEquals(42, nestedList.get(0));
map = (Map) nestedList.get(1);
assertEquals("value1", map.get("cat"));
}
public void testParsingStability1() {
assertEquals(true, test("( order.number == 1 || order.number == ( 1+1) || order.number == $id )"));
}
public void testParsingStability2() {
ExpressionCompiler compiler = new ExpressionCompiler("( dim.height == 1 || dim.height == ( 1+1) || dim.height == x )");
Map<String, Object> imports = new HashMap<String, Object>();
imports.put("java.awt.Dimension", Dimension.class);
final ParserContext parserContext = new ParserContext(imports,
null,
"sourceFile");
parserContext.setStrictTypeEnforcement(false);
compiler.compile(parserContext);
}
public void testParsingStability3() {
assertEquals(false, test("!( [\"X\", \"Y\"] contains \"Y\" )"));
}
public void testParsingStability4() {
assertEquals(true, test("vv=\"Edson\"; !(vv ~= \"Mark\")"));
}
public void testConcatWithLineBreaks() {
ExpressionCompiler parser = new ExpressionCompiler("\"foo\"+\n\"bar\"");
ParserContext ctx = new ParserContext();
ctx.setDebugSymbols(true);
ctx.setSourceFile("source.mv");
Serializable c = parser.compile(ctx);
assertEquals("foobar", executeExpression(c));
}
/**
* Community provided test cases
*/
@SuppressWarnings({"unchecked"})
public void testCalculateAge() {
Calendar c1 = Calendar.getInstance();
c1.set(1999, 0, 10); // 10 Jan 1999 (month is zero-based)
Map objectMap = new HashMap(1);
Map propertyMap = new HashMap(1);
propertyMap.put("GEBDAT", c1.getTime());
objectMap.put("EV_VI_ANT1", propertyMap);
assertEquals("N", testCompiledSimple("new org.mvel.tests.core.res.PDFFieldUtil().calculateAge(EV_VI_ANT1.GEBDAT) >= 25 ? 'Y' : 'N'"
, null, objectMap));
}
/**
* Provided by: Alex Roytman
*/
public void testMethodResolutionWithNullParameter() {
Context ctx = new Context();
ctx.setBean(new Bean());
Map<String, Object> vars = new HashMap<String, Object>();
System.out.println("bean.today: " + eval("bean.today", ctx, vars));
System.out.println("formatDate(bean.today): " + eval("formatDate(bean.today)", ctx, vars));
// calling a method that takes a String with a null argument works
System.out.println("formatString(bean.nullString): " + eval("formatString(bean.nullString)", ctx, vars));
System.out.println("bean.myDate = bean.nullDate: " + eval("bean.myDate = bean.nullDate; return bean.nullDate;", ctx, vars));
// calling a method that takes a Date with a null argument fails
System.out.println("formatDate(bean.myDate): " + eval("formatDate(bean.myDate)", ctx, vars));
//same here
System.out.println(eval("formatDate(bean.nullDate)", ctx, vars));
}
/**
* Provided by: Phillipe Ombredanne
*/
public void testCompileParserContextShouldNotLoopIndefinitelyOnValidJavaExpression() {
String expr = " System.out.println( message );\n" + //
"m.setMessage( \"Goodbye cruel world\" );\n" + //
"System.out.println(m.getStatus());\n" + //
"m.setStatus( Message.GOODBYE );\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(false);
context.addImport("Message", Message.class);
context.addInput("System", void.class);
context.addInput("message", Object.class);
context.addInput("m", Object.class);
compiler.compile(context);
}
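// A nested static class constant is referenced through its binary ('$'-separated) class name.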
public void testStaticNested() {
assertEquals(1, eval("org.mvel.tests.core.AbstractTest$Message.GOODBYE", new HashMap()));
}
public void testStaticNestedWithImport() {
String expr = "Message.GOODBYE;\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(false);
context.addImport("Message", Message.class);
Serializable compiledExpression = compiler.compile(context);
assertEquals(1, executeExpression(compiledExpression));
}
public void testStaticNestedWithMethodCall() {
String expr = "item = new Item( \"Some Item\"); $msg.addItem( item ); return $msg";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(false);
context.addImport("Message", Message.class);
context.addImport("Item", Item.class);
Serializable compiledExpression = compiler.compile(context);
Map vars = new HashMap();
vars.put("$msg", new Message());
Message msg = (Message) executeExpression(compiledExpression, vars);
Item item = (Item) msg.getItems().get(0);
assertEquals("Some Item", item.getName());
}
public void testSequentialAccessorsThenMethodCall() {
String expr = "System.out.println(drools.workingMemory); drools.workingMemory.ruleBase.removeRule(\"org.drools.examples\", \"some rule\"); ";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(true);
context.addInput("drools", KnowledgeHelper.class);
RuleBase ruleBase = new RuleBaseImpl();
WorkingMemory wm = new WorkingMemoryImpl(ruleBase);
KnowledgeHelper drools = new DefaultKnowledgeHelper(wm);
Serializable compiledExpression = compiler.compile(context);
Map vars = new HashMap();
vars.put("drools", drools);
executeExpression(compiledExpression, vars);
}
/**
* Provided by: Aadi Deshpande
*/
public void testPropertyVerifierShouldNotLoopIndefinitely() {
String expr = "\t\tmodel.latestHeadlines = $list;\n" +
"model.latestHeadlines.add( 0, (model.latestHeadlines[2]) );";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
compiler.setVerifying(true);
ParserContext pCtx = new ParserContext();
pCtx.addInput("$list", List.class);
pCtx.addInput("model", Model.class);
compiler.compile(pCtx);
}
public void testCompileWithNewInsideMethodCall() {
String expr = " p.name = \"goober\";\n" +
" System.out.println(p.name);\n" +
" drools.insert(new Address(\"Latona\"));\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(false);
context.addImport("Person", Person.class);
context.addImport("Address", Address.class);
context.addInput("p", Person.class);
context.addInput("drools", Drools.class);
compiler.compile(context);
}
/**
* Submitted by: cleverpig
*/
public void testBug4() {
ClassA A = new ClassA();
ClassB B = new ClassB();
System.out.println(MVEL.getProperty("date", A));
System.out.println(MVEL.getProperty("date", B));
}
/**
* Submitted by: Michael Neale
*/
public void testInlineCollectionParser1() {
assertEquals("q", ((Map) test("['Person.age' : [1, 2, 3, 4],'Person.rating' : 'q']")).get("Person.rating"));
assertEquals("q", ((Map) test("['Person.age' : [1, 2, 3, 4], 'Person.rating' : 'q']")).get("Person.rating"));
}
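// Iterates a LinkedHashMap via foreach over keySet() and indexes it with map[key]; the insertion order of
// the LinkedHashMap makes the concatenated result ('foobar') deterministic.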
public void testIndexer() {
assertEquals("foobar", testCompiledSimple("import java.util.LinkedHashMap; LinkedHashMap map = new LinkedHashMap();" +
" map.put('a', 'foo'); map.put('b', 'bar'); s = ''; foreach (key : map.keySet()) { System.out.println(map[key]); s += map[key]; }; return s;", createTestMap()));
}
public void testLateResolveOfClass() {
ExpressionCompiler compiler = new ExpressionCompiler("System.out.println(new Foo());");
ParserContext ctx = new ParserContext();
ctx.addImport(Foo.class);
CompiledExpression s = compiler.compile(ctx);
compiler.removeParserContext();
System.out.println(executeExpression(s));
}
public void testClassAliasing() {
assertEquals("foobar", test("Foo = String; new Foo('foobar')"));
}
public void testRandomExpression1() {
assertEquals("HelloWorld", test("if ((x15 = foo.bar) == foo.bar && x15 == foo.bar) { return 'HelloWorld'; } else { return 'GoodbyeWorld' } "));
}
public void testRandomExpression2() {
assertEquals(11, test("counterX = 0; foreach (item:{1,2,3,4,5,6,7,8,9,10}) { counterX++; }; return counterX + 1;"));
}
public void testRandomExpression3() {
assertEquals(0, test("counterX = 10; foreach (item:{1,1,1,1,1,1,1,1,1,1}) { counterX -= item; } return counterX;"));
}
public void testRandomExpression4() {
assertEquals(true, test("result = org.mvel.MVEL.eval('10 * 3'); result == (10 * 3);"));
}
public void testRandomExpression5() {
assertEquals(true, test("FooClassRef = foo.getClass(); fooInst = new FooClassRef(); name = org.mvel.MVEL.eval('name', fooInst); return name == 'dog'"));
}
public void testRandomExpression6() {
assertEquals(500, test("exprString = '250' + ' ' + '*' + ' ' + '2'; compiledExpr = org.mvel.MVEL.compileExpression(exprString);" +
" return org.mvel.MVEL.executeExpression(compiledExpr);"));
}
public void testRandomExpression7() {
assertEquals("FOOBAR", test("'foobar'.toUpperCase();"));
}
public void testRandomExpression8() {
assertEquals(true, test("'someString'.intern(); 'someString'.hashCode() == 'someString'.hashCode();"));
}
public void testRandomExpression9() {
assertEquals(false, test("_abc = 'someString'.hashCode(); _xyz = _abc + 1; _abc == _xyz"));
}
public void testRandomExpression10() {
assertEquals(false, test("(_abc = (_xyz = 'someString'.hashCode()) + 1); _abc == _xyz"));
}
/**
* Submitted by: Guerry Semones
*/
private Map<Object, Object> outerMap;
private Map<Object, Object> innerMap;
public void testAddIntToMapWithMapSyntax() throws Throwable {
outerMap = new HashMap<Object, Object>();
innerMap = new HashMap<Object, Object>();
outerMap.put("innerMap", innerMap);
// regression: MVEL used to resolve 'foo' against outerMap instead of
// the nested innerMap, i.e. the old (wrong) behaviour was effectively
// assertEquals(42, outerMap.get("foo"));
PropertyAccessor.set(outerMap, "innerMap['foo']", 42);
// the value must land in innerMap
assertEquals(42, innerMap.get("foo"));
}
public void testUpdateIntInMapWithMapSyntax() throws Throwable {
outerMap = new HashMap<Object, Object>();
innerMap = new HashMap<Object, Object>();
outerMap.put("innerMap", innerMap);
// regression: MVEL used to update 'foo' in outerMap instead of the
// nested innerMap
innerMap.put("foo", 21);
PropertyAccessor.set(outerMap, "innerMap['foo']", 42);
// the existing entry in innerMap must be updated
assertEquals(42, innerMap.get("foo"));
}
private HashMap<String, Object> context = new HashMap<String, Object>();
public void before() {
HashMap<String, Object> map = new HashMap<String, Object>();
MyBean bean = new MyBean();
bean.setVar(4);
map.put("bean", bean);
context.put("map", map);
}
public void testDeepProperty() {
before();
Serializable compiled = compileExpression("map.bean.var");
Object obj = executeExpression(compiled, context);
assertEquals(4, obj);
}
public void testDeepProperty2() {
before();
Serializable compiled = compileExpression("map.bean.getVar()");
Object obj = executeExpression(compiled, context);
assertEquals(4, obj);
}
public class MyBean {
int var;
public int getVar() {
return var;
}
public void setVar(int var) {
this.var = var;
}
}
public static class TargetClass {
private short _targetValue = 5;
public short getTargetValue() {
return _targetValue;
}
}
public void testNestedMethodCall() {
List elements = new ArrayList();
elements.add(new TargetClass());
Map variableMap = new HashMap();
variableMap.put("elements", elements);
eval(
"results = new java.util.ArrayList(); foreach (element : elements) { if( {5} contains element.targetValue.intValue()) { results.add(element); } }; results",
variableMap);
}
public void testBooleanEvaluation() {
assertEquals(true, test("true||false||false"));
}
public void testBooleanEvaluation2() {
assertEquals(true, test("equalityCheck(1,1)||fun||ackbar"));
}
/**
* Submitted by: Dimitar Dimitrov
*/
public void testFailing() {
Map<String, Object> map = new HashMap<String, Object>();
map.put("os", "windows");
assertTrue((Boolean) eval("os ~= 'windows|unix'", map));
}
public void testSuccess() {
Map<String, Object> map = new HashMap<String, Object>();
map.put("os", "windows");
assertTrue((Boolean) eval("'windows' ~= 'windows|unix'", map));
assertFalse((Boolean) eval("time ~= 'windows|unix'", new java.util.Date()));
}
public void testBooleanStrAppend() {
assertEquals("footrue", test("\"foo\" + true"));
}
public void testStringAppend() {
assertEquals("catbar", test("c + 'bar'"));
}
public void testConvertableTo() {
assertEquals(true, test("pi convertable_to Integer"));
}
public void testAssignPlus() {
assertEquals(10, test("xx0 = 5; xx0 += 4; xx0 + 1"));
}
public void testAssignPlus2() {
assertEquals(10, test("xx0 = 5; xx0 =+ 4; xx0 + 1"));
}
public void testAssignDiv() {
assertEquals(2, test("xx0 = 20; xx0 /= 10; xx0"));
}
public void testAssignMult() {
assertEquals(36, test("xx0 = 6; xx0 *= 6; xx0"));
}
public void testAssignSub() {
assertEquals(11, test("xx0 = 15; xx0 -= 4; xx0"));
}
public void testAssignSub2() {
assertEquals(-95, test("xx0 = 5; xx0 =- 100"));
}
public void testStaticWithExplicitParam() {
PojoStatic pojo = new PojoStatic("10");
eval("org.mvel.tests.core.res.AStatic.Process('10')", pojo, new HashMap());
}
public void testSimpleExpression() {
PojoStatic pojo = new PojoStatic("10");
eval("value!= null", pojo, new HashMap());
}
public void testStaticWithExpressionParam() {
PojoStatic pojo = new PojoStatic("10");
assertEquals("java.lang.String", eval("org.mvel.tests.core.res.AStatic.Process(value.getClass().getName().toString())", pojo));
}
public void testStringIndex() {
assertEquals(true, test("a = 'foobar'; a[4] == 'a'"));
}
public void testArrayConstructionSupport1() {
assertTrue(test("new String[5]") instanceof String[]);
}
public void testArrayConstructionSupport2() {
assertTrue((Boolean) test("xStr = new String[5]; xStr.size() == 5"));
}
public void testArrayConstructionSupport3() {
assertEquals("foo", test("xStr = new String[5][5]; xStr[4][0] = 'foo'; xStr[4][0]"));
}
public void testArrayConstructionSupport4() {
assertEquals(10, test("xStr = new String[5][10]; xStr[4][0] = 'foo'; xStr[4].length"));
}
public void testMath14() {
assertEquals(10 - 5 * 2 + 5 * 8 - 4, test("10-5*2 + 5*8-4"));
}
public void testMath15() {
String ex = "100-500*200 + 500*800-400";
// System.out.println("Expression: " + ex);
assertEquals(100 - 500 * 200 + 500 * 800 - 400, test(ex));
}
public void testMath16() {
String ex = "100-500*200*150 + 500*800-400";
assertEquals(100 - 500 * 200 * 150 + 500 * 800 - 400, test(ex));
}
public void testMath17() {
String ex = "(100 * 50) * 20 / 30 * 2";
// System.out.println("Expression: " + ex);
assertEquals((100d * 50d) * 20d / 30d * 2d, test(ex));
}
public void testMath18() {
String ex = "a = 100; b = 50; c = 20; d = 30; e = 2; (a * b) * c / d * e";
System.out.println("Expression: " + ex);
assertEquals((100d * 50d) * 20d / 30d * 2d, testCompiledSimple(ex, new HashMap()));
}
public void testMath19() {
String ex = "a = 100; b = 500; c = 200; d = 150; e = 500; f = 800; g = 400; a-b*c*d + e*f-g";
System.out.println("Expression: " + ex);
assertEquals(100 - 500 * 200 * 150 + 500 * 800 - 400, testCompiledSimple(ex, new HashMap()));
}
public void testMath32() {
String ex = "x = 20; y = 10; z = 5; x-y-z";
System.out.println("Expression: " + ex);
assertEquals(20 - 10 - 5, testCompiledSimple(ex, new HashMap()));
}
public void testMath33() {
String ex = "x = 20; y = 2; z = 2; x/y/z";
System.out.println("Expression: " + ex);
assertEquals(20 / 2 / 2, testCompiledSimple(ex, new HashMap()));
}
public void testMath20() {
String ex = "10-5*7-3*8-6";
System.out.println("Expression: " + ex);
assertEquals(10 - 5 * 7 - 3 * 8 - 6, test(ex));
}
public void testMath21() {
String expression = "100-50*70-30*80-60";
System.out.println("Expression: " + expression);
assertEquals(100 - 50 * 70 - 30 * 80 - 60, test(expression));
}
public void testMath22() {
String expression = "(100-50)*70-30*(20-9)**3";
System.out.println("Expression: " + expression);
assertEquals((int) ((100 - 50) * 70 - 30 * Math.pow(20 - 9, 3)), test(expression));
}
public void testMath22b() {
String expression = "a = 100; b = 50; c = 70; d = 30; e = 20; f = 9; g = 3; (a-b)*c-d*(e-f)**g";
System.out.println("Expression: " + expression);
assertEquals((int) ((100 - 50) * 70 - 30 * Math.pow(20 - 9, 3)), testCompiledSimple(expression, new HashMap()));
}
public void testMath23() {
String expression = "10 ** (3)*10**3";
System.out.println("Expression: " + expression);
assertEquals((int) (Math.pow(10, 3) * Math.pow(10, 3)), test(expression));
}
public void testMath24() {
String expression = "51 * 52 * 33 / 24 / 15 + 45 * 66 * 47 * 28 + 19";
double val = 51d * 52d * 33d / 24d / 15d + 45d * 66d * 47d * 28d + 19d;
System.out.println("Expression: " + expression);
System.out.println("Expected Result: " + val);
assertEquals(val, test(expression));
}
public void testMath25() {
String expression = "51 * (4 - 100 * 5) + 10 + 5 * 2 / 1 + 0 + 0 - 80";
int val = 51 * (4 - 100 * 5) + 10 + 5 * 2 / 1 + 0 + 0 - 80;
System.out.println("Expression: " + expression);
System.out.println("Expected Result: " + val);
assertEquals(val, test(expression));
}
public void testMath26() {
String expression = "5 + 3 * 8 * 2 ** 2";
int val = (int) (5d + 3d * 8d * Math.pow(2, 2));
System.out.println("Expression: " + expression);
System.out.println("Expected Result: " + val);
Object result = test(expression);
assertEquals(val, result);
}
public void testMath27() {
String expression = "50 + 30 * 80 * 20 ** 3 * 51";
double val = 50 + 30 * 80 * Math.pow(20, 3) * 51;
System.out.println("Expression: " + expression);
System.out.println("Expected Result: " + val);
Object result = test(expression);
assertEquals((int) val, result);
}
public void testMath28() {
String expression = "50 + 30 + 80 + 11 ** 2 ** 2 * 51";
double val = 50 + 30 + 80 + Math.pow(Math.pow(11, 2), 2) * 51;
Object result = test(expression);
assertEquals((int) val, result);
}
public void testMath29() {
String expression = "10 + 20 / 4 / 4";
System.out.println("Expression: " + expression);
double val = 10d + 20d / 4d / 4d;
assertEquals(val, MVEL.eval(expression));
}
public void testMath30() {
String expression = "40 / 20 + 10 + 6 / 2";
float val = 40f / 20f + 10f + 6f / 2f;
assertEquals((int) val, MVEL.eval(expression));
}
public void testMath31() {
String expression = "40 / 20 + 5 - 4 + 8 / 2 * 2 * 6 ** 2 + 6 - 8";
double val = 40f / 20f + 5f - 4f + 8f / 2f * 2f * Math.pow(6, 2) + 6f - 8f;
assertEquals((int) val, MVEL.eval(expression));
}
public void testMath34() {
String expression = "a+b-c*d*x/y-z+10";
Map map = new HashMap();
map.put("a", 200);
map.put("b", 100);
map.put("c", 150);
map.put("d", 2);
map.put("x", 400);
map.put("y", 300);
map.put("z", 75);
Serializable s = compileExpression(expression);
assertEquals(200 + 100 - 150 * 2 * 400 / 300 - 75 + 10, executeExpression(s, map));
}
public void testMath34_Interpreted() {
String expression = "a+b-c*x/y-z";
Map map = new HashMap();
map.put("a", 200);
map.put("b", 100);
map.put("c", 150);
map.put("x", 400);
map.put("y", 300);
map.put("z", 75);
assertEquals(200 + 100 - 150 * 400 / 300 - 75, MVEL.eval(expression, map));
}
public void testMath35() {
String expression = "b/x/b/b*y+a";
Map map = new HashMap();
map.put("a", 10);
map.put("b", 20);
map.put("c", 30);
map.put("x", 40);
map.put("y", 50);
map.put("z", 60);
Serializable s = compileExpression(expression);
assertNumEquals(20d / 40d / 20d / 20d * 50d + 10d, executeExpression(s, map));
}
public void testMath35_Interpreted() {
String expression = "b/x/b/b*y+a";
Map map = new HashMap();
map.put("a", 10);
map.put("b", 20);
map.put("c", 30);
map.put("x", 40);
map.put("y", 50);
map.put("z", 60);
assertNumEquals(20d / 40d / 20d / 20d * 50d + 10d, MVEL.eval(expression, map));
}
public void testMath36() {
String expression = "b/x*z/a+x-b+x-b/z+y";
Map map = new HashMap();
map.put("a", 10);
map.put("b", 20);
map.put("c", 30);
map.put("x", 40);
map.put("y", 50);
map.put("z", 60);
Serializable s = compileExpression(expression);
assertNumEquals(20d / 40d * 60d / 10d + 40d - 20d + 40d - 20d / 60d + 50d, executeExpression(s, map));
}
public void testMath37() {
String expression = "x+a*a*c/x*b*z+x/y-b";
Map map = new HashMap();
map.put("a", 10);
map.put("b", 20);
map.put("c", 30);
map.put("x", 2);
map.put("y", 2);
map.put("z", 60);
Serializable s = compileExpression(expression);
assertNumEquals(2d + 10d * 10d * 30d / 2d * 20d * 60d + 2d / 2d - 20d, executeExpression(s, map));
}
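// Null-safe navigation (foo.?bar.name) must yield null instead of throwing when bar is null, under both the
// ASM and reflective optimizers (each executed twice) as well as in interpreted mode.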
public void testNullSafe() {
Foo foo = new Foo();
foo.setBar(null);
Map map = new HashMap();
map.put("foo", foo);
String expression = "foo.?bar.name == null";
Serializable compiled = compileExpression(expression);
OptimizerFactory.setDefaultOptimizer("ASM");
assertEquals(true, executeExpression(compiled, map));
assertEquals(true, executeExpression(compiled, map)); // execute a second time (to search for optimizer problems)
OptimizerFactory.setDefaultOptimizer("reflective");
assertEquals(true, executeExpression(compiled, map));
assertEquals(true, executeExpression(compiled, map)); // execute a second time (to search for optimizer problems)
assertEquals(true, eval(expression, map));
}
/**
* MVEL-57 (Submitted by: Rognvald Eaversen) -- Slightly modified by cbrock to include a positive testcase.
*/
public void testMethodInvocationWithCollectionElement() {
context = new HashMap();
context.put("pojo", new POJO());
context.put("number", "1192800637980");
Object result = MVEL.eval("pojo.function(pojo.dates[0].time)", context);
assertEquals(String.valueOf(((POJO) context.get("pojo")).getDates().iterator().next().getTime()), result);
}
public void testNestedWithInList() {
Recipient recipient1 = new Recipient();
recipient1.setName("userName1");
recipient1.setEmail("[email protected]");
Recipient recipient2 = new Recipient();
recipient2.setName("userName2");
recipient2.setEmail("[email protected]");
List list = new ArrayList();
list.add(recipient1);
list.add(recipient2);
String text =
"array = [" +
"(with ( new Recipient() ) {name = 'userName1', email = '[email protected]' })," +
"(with ( new Recipient() ) {name = 'userName2', email = '[email protected]' })];\n";
ParserContext context = new ParserContext();
context.addImport(Recipient.class);
ExpressionCompiler compiler = new ExpressionCompiler(text);
Serializable execution = compiler.compile(context);
List result = (List) executeExpression(execution);
assertEquals(list, result);
}
// public void testNestedWithInMethod() {
// Recipient recipient1 = new Recipient();
// recipient1.setName("userName1");
// recipient1.setEmail("[email protected]");
//
// Recipients recipients = new Recipients();
// recipients.addRecipient(recipient1);
//
// String text =
// "recipients = new Recipients();\n" +
// "recipients.addRecipient( (with ( new Recipient() ) {name = 'userName1', email = '[email protected]' }) );\n" +
// "return recipients;\n";
//
// ParserContext context;
// context = new ParserContext();
// context.addImport(Recipient.class);
// context.addImport(Recipients.class);
//
// ExpressionCompiler compiler = new ExpressionCompiler(text);
// Serializable execution = compiler.compile(context);
// Recipients result = (Recipients) MVEL.executeExpression(execution);
// assertEquals(recipients, result);
// }
//
// public void testNestedWithInComplexGraph() {
// Recipients recipients = new Recipients();
//
// Recipient recipient1 = new Recipient();
// recipient1.setName("user1");
// recipient1.setEmail("[email protected]");
// recipients.addRecipient(recipient1);
//
// Recipient recipient2 = new Recipient();
// recipient2.setName("user2");
// recipient2.setEmail("[email protected]");
// recipients.addRecipient(recipient2);
//
// EmailMessage msg = new EmailMessage();
// msg.setRecipients(recipients);
// msg.setFrom("[email protected]");
//
// String text = "(with ( new EmailMessage() ) { recipients = (with (new Recipients()) { recipients = [(with ( new Recipient() ) {name = 'user1', email = '[email protected]'}), (with ( new Recipient() ) {name = 'user2', email = '[email protected]'}) ] }), " +
// " from = '[email protected]' } )";
// ParserContext context;
// context = new ParserContext();
// context.addImport(Recipient.class);
// context.addImport(Recipients.class);
// context.addImport(EmailMessage.class);
//
// ExpressionCompiler compiler = new ExpressionCompiler(text);
// Serializable execution = compiler.compile(context);
// EmailMessage result = (EmailMessage) MVEL.executeExpression(execution);
// assertEquals(msg, result);
// }
//
// public void testNestedWithInComplexGraph2() {
// Recipients recipients = new Recipients();
//
// Recipient recipient1 = new Recipient();
// recipient1.setName("user1");
// recipient1.setEmail("[email protected]");
// recipients.addRecipient(recipient1);
//
// Recipient recipient2 = new Recipient();
// recipient2.setName("user2");
// recipient2.setEmail("[email protected]");
// recipients.addRecipient(recipient2);
//
// EmailMessage msg = new EmailMessage();
// msg.setRecipients(recipients);
// msg.setFrom("[email protected]");
//
// String text = "";
// text += "with( new EmailMessage() ) { ";
// text += " recipients = with( new Recipients() ){ ";
// text += " recipients = [ with( new Recipient() ) { name = 'user1', email = '[email protected]' }, ";
// text += " with( new Recipient() ) { name = 'user2', email = '[email protected]' } ] ";
// text += " }, ";
// text += " from = '[email protected]' }";
// ParserContext context;
// context = new ParserContext();
// context.addImport(Recipient.class);
// context.addImport(Recipients.class);
// context.addImport(EmailMessage.class);
//
// ExpressionCompiler compiler = new ExpressionCompiler(text);
// Serializable execution = compiler.compile(context);
// EmailMessage result = (EmailMessage) MVEL.executeExpression(execution);
// assertEquals(msg, result);
// }
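// Builds the same EmailMessage graph using the abbreviated '.{ }' with-block syntax, first under the ASM
// optimizer and then under the reflective optimizer.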
public void testNestedWithInComplexGraph3() {
Recipients recipients = new Recipients();
Recipient recipient1 = new Recipient();
recipient1.setName("user1");
recipient1.setEmail("[email protected]");
recipients.addRecipient(recipient1);
Recipient recipient2 = new Recipient();
recipient2.setName("user2");
recipient2.setEmail("[email protected]");
recipients.addRecipient(recipient2);
EmailMessage msg = new EmailMessage();
msg.setRecipients(recipients);
msg.setFrom("[email protected]");
String text = "";
text += "new EmailMessage().{ ";
text += " recipients = new Recipients().{ ";
text += " recipients = [ new Recipient().{ name = 'user1', email = '[email protected]' }, ";
text += " new Recipient().{ name = 'user2', email = '[email protected]' } ] ";
text += " }, ";
text += " from = '[email protected]' }";
ParserContext context;
context = new ParserContext();
context.addImport(Recipient.class);
context.addImport(Recipients.class);
context.addImport(EmailMessage.class);
OptimizerFactory.setDefaultOptimizer("ASM");
ExpressionCompiler compiler = new ExpressionCompiler(text);
Serializable execution = compiler.compile(context);
assertEquals(msg, executeExpression(execution));
assertEquals(msg, executeExpression(execution));
assertEquals(msg, executeExpression(execution));
OptimizerFactory.setDefaultOptimizer("reflective");
context = new ParserContext(context.getParserConfiguration());
compiler = new ExpressionCompiler(text);
execution = compiler.compile(context);
assertEquals(msg, executeExpression(execution));
assertEquals(msg, executeExpression(execution));
assertEquals(msg, executeExpression(execution));
}
public static class Recipient {
private String name;
private String email;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((email == null) ? 0 : email.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final Recipient other = (Recipient) obj;
if (email == null) {
if (other.email != null) return false;
}
else if (!email.equals(other.email)) return false;
if (name == null) {
if (other.name != null) return false;
}
else if (!name.equals(other.name)) return false;
return true;
}
}
public static class Recipients {
private List<Recipient> list = Collections.EMPTY_LIST;
public void setRecipients(List<Recipient> recipients) {
this.list = recipients;
}
public boolean addRecipient(Recipient recipient) {
if (list == Collections.EMPTY_LIST) {
this.list = new ArrayList<Recipient>();
}
if (!this.list.contains(recipient)) {
this.list.add(recipient);
return true;
}
return false;
}
public boolean removeRecipient(Recipient recipient) {
return this.list.remove(recipient);
}
public List<Recipient> getRecipients() {
return this.list;
}
public Recipient[] toArray() {
return list.toArray(new Recipient[list.size()]);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((list == null) ? 0 : list.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final Recipients other = (Recipients) obj;
if (list == null) {
return other.list == null;
}
return list.equals(other.list);
}
}
public static class EmailMessage {
private Recipients recipients;
private String from;
public EmailMessage() {
}
public Recipients getRecipients() {
return recipients;
}
public void setRecipients(Recipients recipients) {
this.recipients = recipients;
}
public String getFrom() {
return from;
}
public void setFrom(String from) {
this.from = from;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((from == null) ? 0 : from.hashCode());
result = prime * result + ((recipients == null) ? 0 : recipients.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final EmailMessage other = (EmailMessage) obj;
if (from == null) {
if (other.from != null) return false;
}
else if (!from.equals(other.from)) return false;
if (recipients == null) {
if (other.recipients != null) return false;
}
else if (!recipients.equals(other.recipients)) return false;
return true;
}
}
public class POJO {
private Set<Date> dates = new HashSet<Date>();
public POJO() {
dates.add(new Date());
}
public Set<Date> getDates() {
return dates;
}
public void setDates(Set<Date> dates) {
this.dates = dates;
}
public String function(long num) {
return String.valueOf(num);
}
}
public void testSubEvaluation() {
HashMap<String, Object> map = new HashMap<String, Object>();
map.put("EV_BER_BER_NR", "12345");
map.put("EV_BER_BER_PRIV", Boolean.FALSE);
assertEquals("12345", testCompiledSimple("EV_BER_BER_NR + ((EV_BER_BER_PRIV != empty && EV_BER_BER_PRIV == true) ? \"/PRIVAT\" : '')", null, map));
map.put("EV_BER_BER_PRIV", Boolean.TRUE);
assertEquals("12345/PRIVAT", testCompiledSimple("EV_BER_BER_NR + ((EV_BER_BER_PRIV != empty && EV_BER_BER_PRIV == true) ? \"/PRIVAT\" : '')", null, map));
}
public void testNestedMethod1() {
Vector vectorA = new Vector();
Vector vectorB = new Vector();
vectorA.add("Foo");
Map map = new HashMap();
map.put("vecA", vectorA);
map.put("vecB", vectorB);
testCompiledSimple("vecB.add(vecA.remove(0)); vecA.add('Foo');", null, map);
assertEquals("Foo", vectorB.get(0));
}
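// Regression (per the test name) for a NegativeArraySizeException: very large inline integer sets used with
// 'contains' inside a foreach must still compile, and only the Target with attribute 1 should match.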
public void testNegativeArraySizeBug() throws Exception {
String expressionString1 = "results = new java.util.ArrayList(); foreach (element : elements) { if( ( {30, 214, 158, 31, 95, 223, 213, 86, 159, 34, 32, 96, 224, 160, 85, 201, 29, 157, 100, 146, 82, 203, 194, 145, 140, 81, 27, 166, 212, 38, 28, 94, 168, 23, 87, 150, 35, 149, 193, 33, 132, 206, 93, 196, 24, 88, 195, 36, 26, 154, 167, 108, 204, 74, 46, 25, 153, 202, 79, 207, 143, 43, 16, 80, 198, 208, 144, 41, 97, 142, 83, 18, 162, 103, 155, 98, 44, 17, 205, 77, 156, 141, 165, 102, 84, 37, 101, 222, 40, 104, 99, 177, 182, 22, 180, 21, 137, 221, 179, 78, 42, 178, 19, 183, 139, 218, 219, 39, 220, 20, 184, 217, 138, 62, 190, 171, 123, 113, 59, 118, 225, 124, 169, 60, 117, 1} contains element.attribute ) ) { results.add(element); } }; results";
String expressionString2 = "results = new java.util.ArrayList(); foreach (element : elements) { if( ( {30, 214, 158, 31, 95, 223, 213, 86, 159, 34, 32, 96, 224, 160, 85, 201, 29, 157, 100, 146, 82, 203, 194, 145, 140, 81, 27, 166, 212, 38, 28, 94, 168, 23, 87, 150, 35, 149, 193, 33, 132, 206, 93, 196, 24, 88, 195, 36, 26, 154, 167, 108, 204, 74, 46, 25, 153, 202, 79, 207, 143, 43, 16, 80, 198, 208, 144, 41, 97, 142, 83, 18, 162, 103, 155, 98, 44, 17, 205, 77, 156, 141, 165, 102, 84, 37, 101, 222, 40, 104, 99, 177, 182, 22, 180, 21, 137, 221, 179, 78, 42, 178, 19, 183, 139, 218, 219, 39, 220, 20, 184, 217, 138, 62, 190, 171, 123, 113, 59, 118, 225, 124, 169, 60, 117, 1, 61, 189, 122, 68, 58, 119, 63, 226, 3, 172} contains element.attribute ) ) { results.add(element); } }; results";
List<Target> targets = new ArrayList<Target>();
targets.add(new Target(1));
targets.add(new Target(999));
Map vars = new HashMap();
vars.put("elements", targets);
assertEquals(1, ((List) testCompiledSimple(expressionString1, null, vars)).size());
assertEquals(1, ((List) testCompiledSimple(expressionString2, null, vars)).size());
}
public static final class Target {
private int _attribute;
public Target(int attribute_) {
_attribute = attribute_;
}
public int getAttribute() {
return _attribute;
}
}
public void testFunctionDefAndCall() {
assertEquals("FoobarFoobar",
test("function heyFoo() { return 'Foobar'; };\n" +
"return heyFoo() + heyFoo();"));
}
public void testFunctionDefAndCall2() {
ExpressionCompiler compiler = new ExpressionCompiler("function heyFoo() { return 'Foobar'; };\n" +
"return heyFoo() + heyFoo();");
Serializable s = compiler.compile();
Map<String, Function> m = CompilerTools.extractAllDeclaredFunctions((CompiledExpression) s);
assertTrue(m.containsKey("heyFoo"));
OptimizerFactory.setDefaultOptimizer("reflective");
assertEquals("FoobarFoobar", executeExpression(s, new HashMap()));
assertEquals("FoobarFoobar", executeExpression(s, new HashMap()));
OptimizerFactory.setDefaultOptimizer("dynamic");
}
public void testFunctionDefAndCall3() {
assertEquals("FOOBAR", test("function testFunction() { a = 'foo'; b = 'bar'; a + b; }; testFunction().toUpperCase(); "));
}
public void testFunctionDefAndCall4() {
assertEquals("barfoo", test("function testFunction(input) { return input; }; testFunction('barfoo');"));
}
public void testFunctionDefAndCall5() {
assertEquals(10, test("function testFunction(x, y) { return x + y; }; testFunction(7, 3);"));
}
public void testFunctionDefAndCall6() {
assertEquals("foo", MVEL.eval("def fooFunction(x) x; fooFunction('foo')", new HashMap()));
}
public void testDynamicImports2() {
assertEquals(BufferedReader.class, test("import java.io.*; BufferedReader"));
}
public void testStringWithTernaryIf() {
test("System.out.print(\"Hello : \" + (foo != null ? \"FOO!\" : \"NO FOO\") + \". Bye.\");");
}
public void testFunctionsScript1() throws IOException {
MVEL.evalFile(new File("samples/scripts/functions1.mvel"));
}
public void testQuickSortScript1() throws IOException {
MVEL.evalFile(new File("samples/scripts/quicksort.mvel"));
}
public void testQuickSortScript2() throws IOException {
Object[] sorted = (Object[]) test(new String(loadFromFile(new File("samples/scripts/quicksort.mvel"))));
int last = -1;
for (Object o : sorted) {
if (last == -1) {
last = (Integer) o;
}
else {
assertTrue(((Integer) o) > last);
last = (Integer) o;
}
}
}
public void testQuickSortScript3() throws IOException {
Object[] sorted = (Object[]) test(new String(loadFromFile(new File("samples/scripts/quicksort2.mvel"))));
int last = -1;
for (Object o : sorted) {
if (last == -1) {
last = (Integer) o;
}
else {
assertTrue(((Integer) o) > last);
last = (Integer) o;
}
}
}
public void testMultiLineString() throws IOException {
MVEL.evalFile(new File("samples/scripts/multilinestring.mvel"));
}
public void testCompactIfElse() {
assertEquals("foo", test("if (false) 'bar'; else 'foo';"));
}
public void testAndOpLiteral() {
assertEquals(true, test("true && true"));
}
public void testAnonymousFunctionDecl() {
assertEquals(3, test("anonFunc = function (a,b) { return a + b; }; anonFunc(1,2)"));
}
public void testFunctionSemantics() {
assertEquals(true, test("function fooFunction(a) { return a; }; x__0 = ''; 'boob' == fooFunction(x__0 = 'boob') && x__0 == 'boob';"));
}
public void testUseOfVarKeyword() {
assertEquals("FOO_BAR", test("var barfoo = 'FOO_BAR'; return barfoo;"));
}
public void testAssignment5() {
assertEquals(15, test("x = (10) + (5); x"));
}
public void testSetExpressions1() {
Map<String, Object> myMap = new HashMap<String, Object>();
final Serializable fooExpr = MVEL.compileSetExpression("foo");
MVEL.executeSetExpression(fooExpr, myMap, "blah");
assertEquals("blah", myMap.get("foo"));
MVEL.executeSetExpression(fooExpr, myMap, "baz");
assertEquals("baz", myMap.get("foo"));
}
public void testInlineCollectionNestedObjectCreation() {
Map m = (Map) test("['Person.age' : [1, 2, 3, 4], 'Person.rating' : ['High', 'Low']," +
" 'Person.something' : (new String('foo').toUpperCase())]");
assertEquals("FOO", m.get("Person.something"));
}
public void testInlineCollectionNestedObjectCreation1() {
Map m = (Map) test("[new String('foo') : new String('bar')]");
assertEquals("bar", m.get("foo"));
}
public void testEgressType() {
ExpressionCompiler compiler = new ExpressionCompiler("( $cheese )");
ParserContext context = new ParserContext();
context.addInput("$cheese", Cheese.class);
assertEquals(Cheese.class, compiler.compile(context).getKnownEgressType());
}
public void testDuplicateVariableDeclaration() {
ExpressionCompiler compiler = new ExpressionCompiler("String x = \"abc\"; Integer x = new Integer( 10 );");
ParserContext context = new ParserContext();
try {
compiler.compile(context);
fail("Compilation must fail with duplicate variable declaration exception.");
}
catch (CompileException ce) {
// success
}
}
public void testFullyQualifiedTypeAndCast() {
assertEquals(1, test("java.lang.Integer number = (java.lang.Integer) '1';"));
}
public void testAnonymousFunction() {
assertEquals("foobar", test("a = function { 'foobar' }; a();"));
}
public void testThreadSafetyInterpreter1() {
//First evaluation
System.out.println("First evaluation: " + MVEL.eval("true"));
new Thread(new Runnable() {
public void run() {
// Second evaluation - this succeeds only if the first evaluation is not commented out
System.out.println("Second evaluation: " + MVEL.eval("true"));
}
}).start();
}
public void testStringEquals() {
assertEquals(true, test("ipaddr == '10.1.1.2'"));
}
public void testArrayList() throws SecurityException, NoSuchMethodException {
Collection<String> collection = new ArrayList<String>();
collection.add("I CAN HAS CHEEZBURGER");
assertEquals(collection.size(), MVEL.eval("size()", collection));
}
public void testUnmodifiableCollection() throws SecurityException, NoSuchMethodException {
Collection<String> collection = new ArrayList<String>();
collection.add("I CAN HAS CHEEZBURGER");
collection = unmodifiableCollection(collection);
assertEquals(collection.size(), MVEL.eval("size()", collection));
}
public void testSingleton() throws SecurityException, NoSuchMethodException {
Collection<String> collection = Collections.singleton("I CAN HAS CHEEZBURGER");
assertEquals(collection.size(), MVEL.eval("size()", collection));
}
public void testCharComparison() {
assertEquals(true, test("'z' > 'a'"));
}
public void testCharComparison2() {
assertEquals(false, test("'z' < 'a'"));
}
public void testRegExMatch() {
assertEquals(true, MVEL.eval("$test = 'foo'; $ex = 'f.*'; $test ~= $ex", new HashMap()));
}
public static class TestClass2 {
public void addEqualAuthorizationConstraint(Foo leg, Bar ctrlClass, Integer authorization) {
}
}
public void testJIRA93() {
Map testMap = createTestMap();
testMap.put("testClass2", new TestClass2());
Serializable s = compileExpression("testClass2.addEqualAuthorizationConstraint(foo, foo.bar, 5)");
for (int i = 0; i < 5; i++) {
executeExpression(s, testMap);
}
}
public void testJIRA96() {
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.addInput("fooString", String[].class);
ExpressionCompiler compiler = new ExpressionCompiler("fooString[0].toUpperCase()");
compiler.compile(ctx);
}
public void testStrongTyping() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
try {
new ExpressionCompiler("blah").compile(ctx);
}
catch (Exception e) {
// should fail
return;
}
assertTrue(false);
}
public void testStrongTyping2() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("blah", String.class);
try {
new ExpressionCompiler("1-blah").compile(ctx);
}
catch (Exception e) {
e.printStackTrace();
return;
}
assertTrue(false);
}
public void testStringToArrayCast() {
Object o = test("(char[]) 'abcd'");
assertTrue(o instanceof char[]);
}
public void testStringToArrayCast2() {
assertTrue((Boolean) test("_xyxy = (char[]) 'abcd'; _xyxy[0] == 'a'"));
}
public void testStaticallyTypedArrayVar() {
assertTrue((Boolean) test("char[] _c___ = new char[10]; _c___ instanceof char[]"));
}
public void testParserErrorHandling() {
final ParserContext ctx = new ParserContext();
ExpressionCompiler compiler = new ExpressionCompiler("a[");
try {
compiler.compile(ctx);
}
catch (Exception e) {
return;
}
assertTrue(false);
}
public void testJIRA99_Interpreted() {
Map map = new HashMap();
map.put("x", 20);
map.put("y", 10);
map.put("z", 5);
assertEquals(20 - 10 - 5, MVEL.eval("x - y - z", map));
}
public void testJIRA99_Compiled() {
Map map = new HashMap();
map.put("x", 20);
map.put("y", 10);
map.put("z", 5);
assertEquals(20 - 10 - 5, testCompiledSimple("x - y - z", map));
}
public void testJIRA100() {
assertEquals(20, test("java.math.BigDecimal axx = new java.math.BigDecimal( 10.0 ); java.math.BigDecimal bxx = new java.math.BigDecimal( 10.0 ); java.math.BigDecimal cxx = axx + bxx; return cxx; "));
}
public void testJIRA100a() {
assertEquals(233.23, test("java.math.BigDecimal axx = new java.math.BigDecimal( 109.45 ); java.math.BigDecimal bxx = new java.math.BigDecimal( 123.78 ); java.math.BigDecimal cxx = axx + bxx; return cxx; "));
}
public void testJIRA100b() {
String expression = "(8 / 10) * 100 <= 80;";
assertEquals((8 / 10) * 100 <= 80, testCompiledSimple(expression, new HashMap()));
}
public void testJIRA92() {
assertEquals(false, test("'stringValue' > null"));
}
public void testAssignToBean() {
Person person = new Person();
MVEL.eval("this.name = 'foo'", person);
assertEquals("foo", person.getName());
executeExpression(compileExpression("this.name = 'bar'"), person);
assertEquals("bar", person.getName());
}
public void testParameterizedTypeInStrictMode() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("foo", HashMap.class, new Class[]{String.class, String.class});
ExpressionCompiler compiler = new ExpressionCompiler("foo.get('bar').toUpperCase()");
compiler.compile(ctx);
}
public void testParameterizedTypeInStrictMode2() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("ctx", Object.class);
ExpressionCompiler compiler = new ExpressionCompiler("org.mvel.DataConversion.convert(ctx, String).toUpperCase()");
// CompiledExpression ce = compiler.compile(ctx);
assertEquals(String.class, compiler.compile(ctx).getKnownEgressType());
}
public void testParameterizedTypeInStrictMode3() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("base", Base.class);
ExpressionCompiler compiler = new ExpressionCompiler("base.list");
assertTrue(compiler.compile(ctx).getParserContext().getLastTypeParameters()[0].equals(String.class));
}
public void testParameterizedTypeInStrictMode4() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("base", Base.class);
ExpressionCompiler compiler = new ExpressionCompiler("base.list.get(1).toUpperCase()");
CompiledExpression ce = compiler.compile(ctx);
assertEquals(String.class, ce.getKnownEgressType());
}
public void testMapAssignmentNestedExpression() {
Map map = new HashMap();
map.put("map", new HashMap());
String ex = "map[java.lang.Integer.MAX_VALUE] = 'bar'; map[java.lang.Integer.MAX_VALUE];";
assertEquals("bar", executeExpression(compileExpression(ex), map));
assertEquals("bar", MVEL.eval(ex, map));
}
public void testMapAssignmentNestedExpression2() {
Map map = new HashMap();
map.put("x", "bar");
map.put("map", new HashMap());
String ex = "map[x] = 'foo'; map['bar'];";
assertEquals("foo", executeExpression(compileExpression(ex), map));
assertEquals("foo", MVEL.eval(ex, map));
}
/**
* MVEL-103
*/
public static class MvelContext {
public boolean singleCalled;
public boolean arrayCalled;
public void methodForTest(String string) {
System.out.println("sigle param method called!");
singleCalled = true;
}
public void methodForTest(String[] strings) {
System.out.println("array param method called!");
arrayCalled = true;
}
}
public void testMethodResolutionOrder() {
MvelContext mvelContext = new MvelContext();
MVEL.eval("methodForTest({'1','2'})", mvelContext);
MVEL.eval("methodForTest('1')", mvelContext);
assertTrue(mvelContext.arrayCalled && mvelContext.singleCalled);
}
public void testOKQuoteComment() throws Exception {
// ' in comments outside of blocks seem OK
compileExpression("// ' this is OK!");
compileExpression("// ' this is OK!\n");
compileExpression("// ' this is OK!\nif(1==1) {};");
}
public void testOKDblQuoteComment() throws Exception {
// " in comments outside of blocks seem OK
compileExpression("// \" this is OK!");
compileExpression("// \" this is OK!\n");
compileExpression("// \" this is OK!\nif(1==1) {};");
}
public void testIfComment() throws Exception {
// No quote? OK!
compileExpression("if(1 == 1) {\n" +
" // Quote & Double-quote seem to break this expression\n" +
"}");
}
public void testIfQuoteCommentBug() throws Exception {
// Comments in an if seem to fail if they contain a '
compileExpression("if(1 == 1) {\n" +
" // ' seems to break this expression\n" +
"}");
}
public void testIfDblQuoteCommentBug() throws Exception {
// Comments in an if seem to fail if they contain a "
compileExpression("if(1 == 1) {\n" +
" // \" seems to break this expression\n" +
"}");
}
public void testForEachQuoteCommentBug() throws Exception {
// Comments in a foreach seem to fail if they contain a '
compileExpression("foreach ( item : 10 ) {\n" +
" // The ' character causes issues\n" +
"}");
}
public void testForEachDblQuoteCommentBug() throws Exception {
// Comments in a foreach seem to fail if they contain a "
compileExpression("foreach ( item : 10 ) {\n" +
" // The \" character causes issues\n" +
"}");
}
public void testForEachCommentOK() throws Exception {
// No quote? OK!
compileExpression("foreach ( item : 10 ) {\n" +
" // The quote & double quote characters cause issues\n" +
"}");
}
public void testElseIfCommentBugPreCompiled() throws Exception {
// Comments can't appear before else if() - compilation works, but evaluation fails
executeExpression(compileExpression("// This is never true\n" +
"if (1==0) {\n" +
" // Never reached\n" +
"}\n" +
"// This is always true...\n" +
"else if (1==1) {" +
" System.out.println('Got here!');" +
"}\n"));
}
public void testElseIfCommentBugEvaluated() throws Exception {
// Comments can't appear before else if()
MVEL.eval("// This is never true\n" +
"if (1==0) {\n" +
" // Never reached\n" +
"}\n" +
"// This is always true...\n" +
"else if (1==1) {" +
" System.out.println('Got here!');" +
"}\n");
}
public void testRegExpOK() throws Exception {
// This works OK interpreted
assertEquals(Boolean.TRUE, MVEL.eval("'Hello'.toUpperCase() ~= '[A-Z]{0,5}'"));
assertEquals(Boolean.TRUE, MVEL.eval("1 == 0 || ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')"));
// This works OK if toUpperCase() is avoided in pre-compiled
assertEquals(Boolean.TRUE, executeExpression(compileExpression("'Hello' ~= '[a-zA-Z]{0,5}'")));
}
public void testRegExpPreCompiledBug() throws Exception {
// If toUpperCase() is used in the expression then this fails; returns null not
// a boolean.
Object ser = compileExpression("'Hello'.toUpperCase() ~= '[a-zA-Z]{0,5}'");
assertEquals(Boolean.TRUE, executeExpression(ser));
}
public void testRegExpOrBug() throws Exception {
// This fails during execution due to returning null, I think...
assertEquals(Boolean.TRUE, executeExpression(compileExpression("1 == 0 || ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')")));
}
public void testRegExpAndBug() throws Exception {
// This also fails due to returning null, I think...
// Object ser = MVEL.compileExpression("1 == 1 && ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')");
assertEquals(Boolean.TRUE, executeExpression(compileExpression("1 == 1 && ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')")));
}
public void testLiteralUnionWithComparison() {
assertEquals(Boolean.TRUE, executeExpression(compileExpression("'Foo'.toUpperCase() == 'FOO'")));
}
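// Fixture for the egress parametric-type tests below: a List<String> exposed via A.getStrings().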
public static final List<String> STRINGS = Arrays.asList("hi", "there");
public static class A {
public List<String> getStrings() {
return STRINGS;
}
}
public final void testDetermineEgressParametricType() {
final ParserContext parserContext = new ParserContext();
parserContext.setStrongTyping(true);
parserContext.addInput("strings", List.class, new Class[]{String.class});
final CompiledExpression expr = new ExpressionCompiler("strings").compile(parserContext);
assertTrue(STRINGS.equals(executeExpression(expr, new A())));
final Type[] typeParameters = expr.getParserContext().getLastTypeParameters();
assertTrue(typeParameters != null);
assertTrue(String.class.equals(typeParameters[0]));
}
public final void testDetermineEgressParametricType2() {
final ParserContext parserContext = new ParserContext();
parserContext.setStrongTyping(true);
parserContext.addInput("strings", List.class, new Class[]{String.class});
final CompiledExpression expr = new ExpressionCompiler("strings", parserContext)
.compile();
assertTrue(STRINGS.equals(executeExpression(expr, new A())));
final Type[] typeParameters = expr.getParserContext().getLastTypeParameters();
assertTrue(null != typeParameters);
assertTrue(String.class.equals(typeParameters[0]));
}
public void testCustomPropertyHandler() {
PropertyHandlerFactory.registerPropertyHandler(SampleBean.class, new SampleBeanAccessor());
assertEquals("dog", test("foo.sampleBean.bar.name"));
}
public void testSetAccessorOverloadedEqualsStrictMode() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("foo", Foo.class);
try {
CompiledExpression expr = new ExpressionCompiler("foo.bar = 0").compile(ctx);
}
catch (CompileException e) {
// should fail.
e.printStackTrace();
return;
}
assertTrue(false);
}
public void testSetAccessorOverloadedEqualsStrictMode2() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("foo", Foo.class);
try {
CompiledExpression expr = new ExpressionCompiler("foo.aValue = 'bar'").compile(ctx);
}
catch (CompileException e) {
assertTrue(false);
}
}
public void testAnalysisCompile() {
CompiledExpression ce = new ExpressionCompiler("foo.aValue = 'bar'").compile();
assertTrue(ce.getParserContext().getInputs().keySet().contains("foo"));
}
public void testInlineWith() {
CompiledExpression expr = new ExpressionCompiler("foo.{name='poopy', aValue='bar'}").compile();
Foo f = (Foo) executeExpression(expr, createTestMap());
assertEquals("poopy", f.getName());
assertEquals("bar", f.aValue);
}
public void testInlineWith2() {
CompiledExpression expr = new ExpressionCompiler("foo.{name = 'poopy', aValue = 'bar', bar.{name = 'foobie'}}").compile();
Foo f = (Foo) executeExpression(expr, createTestMap());
assertEquals("poopy", f.getName());
assertEquals("bar", f.aValue);
assertEquals("foobie", f.getBar().getName());
}
public void testInlineWith3() {
CompiledExpression expr = new ExpressionCompiler("foo.{name = 'poopy', aValue = 'bar', bar.{name = 'foobie'}, toUC('doopy')}").compile();
Foo f = (Foo) executeExpression(expr, createTestMap());
assertEquals("poopy", f.getName());
assertEquals("bar", f.aValue);
assertEquals("foobie", f.getBar().getName());
assertEquals("doopy", f.register);
}
public void testInlineWith4() {
OptimizerFactory.setDefaultOptimizer("ASM");
ExpressionCompiler expr = new ExpressionCompiler("new Foo().{ name = 'bar' }");
ParserContext pCtx = new ParserContext();
pCtx.addImport(Foo.class);
CompiledExpression c = expr.compile(pCtx);
Foo f = (Foo) executeExpression(c);
assertEquals("bar", f.getName());
f = (Foo) executeExpression(c);
assertEquals("bar", f.getName());
}
public void testInlineWithImpliedThis() {
Base b = new Base();
ExpressionCompiler expr = new ExpressionCompiler(".{ data = 'foo' }");
CompiledExpression compiled = expr.compile();
executeExpression(compiled, b);
assertEquals(b.data, "foo");
}
public void testDataConverterStrictMode() throws Exception {
DataConversion.addConversionHandler(Date.class, new MVELDateCoercion());
ParserContext ctx = new ParserContext();
ctx.addImport("Cheese", Cheese.class);
ctx.setStrongTyping(true);
ctx.setStrictTypeEnforcement(true);
Cheese expectedCheese = new Cheese();
expectedCheese.setUseBy(new SimpleDateFormat("dd-MMM-yyyy").parse("10-Jul-1974"));
ExpressionCompiler compiler = new ExpressionCompiler("c = new Cheese(); c.useBy = '10-Jul-1974'; return c");
// Serializable expr = compiler.compile(ctx);
Cheese actualCheese = (Cheese) executeExpression(compiler.compile(ctx), createTestMap());
assertEquals(expectedCheese.getUseBy(), actualCheese.getUseBy());
}
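// Custom ConversionHandler used by testDataConverterStrictMode above; it accepts String
// (and Date-compatible) sources and parses "dd-MMM-yyyy" strings into java.util.Date.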
public static class MVELDateCoercion implements ConversionHandler {
public boolean canConvertFrom(Class cls) {
if (cls == String.class || cls.isAssignableFrom(Date.class)) {
return true;
}
else {
return false;
}
}
public Object convertFrom(Object o) {
try {
SimpleDateFormat sdf = new SimpleDateFormat("dd-MMM-yyyy");
if (o instanceof String) {
return sdf.parse((String) o);
}
else {
return o;
}
}
catch (Exception e) {
throw new RuntimeException("Exception was thrown", e);
}
}
}
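// The tests below exercise KnowledgeHelperFixer, which rewrites bare insert/update/retract/
// insertLogical/modifyInsert/modifyRetract calls to their drools.-qualified forms while
// leaving comments, partial matches, and already-qualified calls untouched.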
private static final KnowledgeHelperFixer fixer = new KnowledgeHelperFixer();
public void testSingleLineCommentSlash() {
String result = fixer.fix(" //System.out.println( \"help\" );\r\n System.out.println( \"help\" ); \r\n list.add( $person );");
assertEquals(" //System.out.println( \"help\" );\r\n System.out.println( \"help\" ); \r\n list.add( $person );",
result);
}
public void testSingleLineCommentHash() {
String result = fixer.fix(" #System.out.println( \"help\" );\r\n System.out.println( \"help\" ); \r\n list.add( $person );");
assertEquals(" #System.out.println( \"help\" );\r\n System.out.println( \"help\" ); \r\n list.add( $person );",
result);
}
public void testMultiLineComment() {
String result = fixer.fix(" /*System.out.println( \"help\" );\r\n*/ System.out.println( \"help\" ); \r\n list.add( $person );");
assertEquals(" /*System.out.println( \"help\" );\r\n*/ System.out.println( \"help\" ); \r\n list.add( $person );",
result);
}
public void testAdd__Handle__Simple() {
String result = fixer.fix("update(myObject );");
assertEqualsIgnoreWhitespace("drools.update(myObject );",
result);
result = fixer.fix("update ( myObject );");
assertEqualsIgnoreWhitespace("drools.update( myObject );",
result);
}
public void testAdd__Handle__withNewLines() {
final String result = fixer.fix("\n\t\n\tupdate( myObject );");
assertEqualsIgnoreWhitespace("\n\t\n\tdrools.update( myObject );",
result);
}
public void testAdd__Handle__rComplex() {
String result = fixer.fix("something update( myObject); other");
assertEqualsIgnoreWhitespace("something drools.update( myObject); other",
result);
result = fixer.fix("something update ( myObject );");
assertEqualsIgnoreWhitespace("something drools.update( myObject );",
result);
result = fixer.fix(" update( myObject ); x");
assertEqualsIgnoreWhitespace(" drools.update( myObject ); x",
result);
// should not touch, as it is not a stand-alone word
result = fixer.fix("xxupdate(myObject ) x");
assertEqualsIgnoreWhitespace("xxupdate(myObject ) x",
result);
}
public void testMultipleMatches() {
String result = fixer.fix("update(myObject); update(myObject );");
assertEqualsIgnoreWhitespace("drools.update(myObject); drools.update(myObject );",
result);
result = fixer.fix("xxx update(myObject ); update( myObject ); update( yourObject ); yyy");
assertEqualsIgnoreWhitespace("xxx drools.update(myObject ); drools.update( myObject ); drools.update( yourObject ); yyy",
result);
}
public void testAssert1() {
final String raw = "insert( foo );";
final String result = "drools.insert( foo );";
assertEqualsIgnoreWhitespace(result,
fixer.fix(raw));
}
public void testAssert2() {
final String raw = "some code; insert( new String(\"foo\") );\n More();";
final String result = "some code; drools.insert( new String(\"foo\") );\n More();";
assertEqualsIgnoreWhitespace(result,
fixer.fix(raw));
}
public void testAssertLogical() {
final String raw = "some code; insertLogical(new String(\"foo\"));\n More();";
final String result = "some code; drools.insertLogical(new String(\"foo\"));\n More();";
assertEqualsIgnoreWhitespace(result,
fixer.fix(raw));
}
public void testModifyRetractModifyInsert() {
final String raw = "some code; insert( bar ); modifyRetract( foo );\n More(); retract( bar ); modifyInsert( foo );";
final String result = "some code; drools.insert( bar ); drools.modifyRetract( foo );\n More(); drools.retract( bar ); drools.modifyInsert( foo );";
assertEqualsIgnoreWhitespace(result,
fixer.fix(raw));
}
public void testAllActionsMushedTogether() {
String result = fixer.fix("insert(myObject ); update(ourObject);\t retract(herObject);");
assertEqualsIgnoreWhitespace("drools.insert(myObject ); drools.update(ourObject);\t drools.retract(herObject);",
result);
result = fixer.fix("insert( myObject ); update(ourObject);\t retract(herObject );\ninsert( myObject ); update(ourObject);\t retract( herObject );");
assertEqualsIgnoreWhitespace("drools.insert( myObject ); drools.update(ourObject);\t drools.retract(herObject );\ndrools.insert( myObject ); drools.update(ourObject);\t drools.retract( herObject );",
result);
}
public void testLeaveLargeAlone() {
final String original = "yeah yeah yeah minsert( xxx ) this is a long() thing Person (name=='drools') modify a thing";
final String result = fixer.fix(original);
assertEqualsIgnoreWhitespace(original,
result);
}
public void testWithNull() {
final String original = null;
final String result = fixer.fix(original);
assertEqualsIgnoreWhitespace(original,
result);
}
public void testLeaveAssertAlone() {
final String original = "drools.insert(foo)";
assertEqualsIgnoreWhitespace(original,
fixer.fix(original));
}
public void testLeaveAssertLogicalAlone() {
final String original = "drools.insertLogical(foo)";
assertEqualsIgnoreWhitespace(original,
fixer.fix(original));
}
public void testWackyAssert() {
final String raw = "System.out.println($person1.getName() + \" and \" + $person2.getName() +\" are sisters\");\n" + "insert($person1.getName(\"foo\") + \" and \" + $person2.getName() +\" are sisters\"); yeah();";
final String expected = "System.out.println($person1.getName() + \" and \" + $person2.getName() +\" are sisters\");\n" + "drools.insert($person1.getName(\"foo\") + \" and \" + $person2.getName() +\" are sisters\"); yeah();";
assertEqualsIgnoreWhitespace(expected,
fixer.fix(raw));
}
public void testMoreAssertCraziness() {
final String raw = "foobar(); (insert(new String(\"blah\").get()); bangBangYudoHono();)";
assertEqualsIgnoreWhitespace("foobar(); (drools.insert(new String(\"blah\").get()); bangBangYudoHono();)",
fixer.fix(raw));
}
public void testRetract() {
final String raw = "System.out.println(\"some text\");retract(object);";
assertEqualsIgnoreWhitespace("System.out.println(\"some text\");drools.retract(object);",
fixer.fix(raw));
}
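// Helper: compares the two strings after stripping all whitespace; if either argument is
// null it falls back to a plain equality assertion.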
private void assertEqualsIgnoreWhitespace(final String expected,
final String actual) {
if (expected == null || actual == null) {
assertEquals(expected,
actual);
return;
}
final String cleanExpected = expected.replaceAll("\\s+",
"");
final String cleanActual = actual.replaceAll("\\s+",
"");
assertEquals(cleanExpected,
cleanActual);
}
public void testIsDefOperator() {
assertEquals(true, test("_v1 = 'bar'; isdef _v1"));
}
public void testIsDefOperator2() {
assertEquals(false, test("isdef _v1"));
}
public void testIsDefOperator3() {
assertEquals(true, test("!(isdef _v1)"));
}
public void testIsDefOperator4() {
assertEquals(true, test("! (isdef _v1)"));
}
public void testReturnType1() {
assertEquals(Double.class, new ExpressionCompiler("100.5").compile().getKnownEgressType());
}
public void testReturnType2() {
assertEquals(Integer.class, new ExpressionCompiler("1").compile().getKnownEgressType());
}
public void testStrongTyping3() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
try {
new ExpressionCompiler("foo.toUC(100.5").compile(ctx);
}
catch (Exception e) {
// should fail.
return;
}
assertTrue(false);
}
public void testDoLoop() {
assertEquals(10, test("i = 0; do { i++ } while (i != 10); i"));
}
public void testDoLoop2() {
assertEquals(50, test("i=100;do{i--}until(i==50); i"));
}
public void testForLoop() {
assertEquals("012345", test("String str = ''; for(i=0;i<6;i++) { str += i }; str"));
}
public void testForLoop2() {
assertEquals("012345", MVEL.eval("String str = ''; for(i=0;i<6;i++) { str += i }; str", new HashMap()));
}
public void testUntilLoop() {
assertEquals("012345", test("String str = ''; int i = 0; until (i == 6) { str += i++; }; str"));
}
public void testXX() {
test("foo = 100; !foo");
}
public void testEgressType1() {
assertEquals(Boolean.class, new ExpressionCompiler("foo != null").compile().getKnownEgressType());
}
public void testIncrementInBooleanStatement() {
assertEquals(true, test("hour++ < 61 && hour == 61"));
}
public void testIncrementInBooleanStatement2() {
assertEquals(true, test("++hour == 61"));
}
public void testDeepNestedLoopsInFunction() {
assertEquals(10, test("def increment(i) { i + 1 }; def ff(i) { x = 0; while (i < 1) { " +
"x++; while (i < 10) { i = increment(i); } }; if (x == 1) return i; else -1; }; i = 0; ff(i);"));
}
public void testArrayDefinitionWithInitializer() {
String[] compareTo = new String[]{"foo", "bar"};
String[] results = (String[]) test("new String[] { 'foo', 'bar' }");
for (int i = 0; i < compareTo.length; i++) {
if (!compareTo[i].equals(results[i])) throw new AssertionError("arrays do not match.");
}
}
public void testStaticallyTypedItemInForEach() {
assertEquals("1234", test("StringBuffer sbuf = new StringBuffer(); foreach (int i : new int[] { 1,2,3,4 }) { sbuf.append(i); }; sbuf.toString()"));
}
public void testStaticallyTypedLong() {
assertEquals(10l, test("10l"));
}
public void testCompileTimeCoercion() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("foo", Foo.class);
// CompiledExpression c = new ExpressionCompiler("foo.bar.woof == 'true'").compile(ctx);
assertEquals(true, executeExpression( new ExpressionCompiler("foo.bar.woof == 'true'").compile(ctx), createTestMap()));
}
}
| src/test/java/org/mvel/tests/core/CoreConfidenceTests.java | package org.mvel.tests.core;
import org.mvel.*;
import static org.mvel.MVEL.*;
import org.mvel.ast.ASTNode;
import org.mvel.ast.Function;
import org.mvel.ast.WithNode;
import org.mvel.compiler.CompiledExpression;
import org.mvel.compiler.ExecutableStatement;
import org.mvel.compiler.ExpressionCompiler;
import org.mvel.debug.DebugTools;
import org.mvel.debug.Debugger;
import org.mvel.debug.Frame;
import org.mvel.integration.Interceptor;
import org.mvel.integration.PropertyHandlerFactory;
import org.mvel.integration.ResolverTools;
import org.mvel.integration.VariableResolverFactory;
import org.mvel.integration.impl.ClassImportResolverFactory;
import org.mvel.integration.impl.DefaultLocalVariableResolverFactory;
import org.mvel.integration.impl.MapVariableResolverFactory;
import org.mvel.integration.impl.StaticMethodImportResolverFactory;
import org.mvel.optimizers.OptimizerFactory;
import org.mvel.tests.core.res.*;
import org.mvel.util.CompilerTools;
import org.mvel.util.MethodStub;
import static org.mvel.util.ParseTools.loadFromFile;
import java.awt.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.text.SimpleDateFormat;
import java.util.*;
import static java.util.Collections.unmodifiableCollection;
import java.util.List;
@SuppressWarnings({"ALL"})
public class CoreConfidenceTests extends AbstractTest {
public void testSingleProperty() {
assertEquals(false, test("fun"));
}
public void testMethodOnValue() {
assertEquals("DOG", test("foo.bar.name.toUpperCase()"));
}
public void testMethodOnValue2() {
assertEquals("DOG", test("foo. bar. name.toUpperCase()"));
}
public void testSimpleProperty() {
assertEquals("dog", test("foo.bar.name"));
}
public void testSimpleProperty2() {
assertEquals("cat", test("DATA"));
}
public void testPropertyViaDerivedClass() {
assertEquals("cat", test("derived.data"));
}
public void testDeepAssignment() {
Map map = createTestMap();
assertEquals("crap", testCompiledSimple("foo.bar.assignTest = 'crap'", map));
assertEquals("crap", testCompiledSimple("foo.bar.assignTest", map));
}
public void testDeepAssignment2() {
Map map = createTestMap();
ExpressionCompiler compiler = new ExpressionCompiler("foo.bar.age = 21");
ParserContext ctx = new ParserContext();
ctx.addInput("foo", Foo.class);
ctx.setStrongTyping(true);
CompiledExpression ce = compiler.compile(ctx);
MVEL.executeExpression(ce, map);
assertEquals(((Foo) map.get("foo")).getBar().getAge(), 21);
}
public void testThroughInterface() {
assertEquals("FOOBAR!", test("testImpl.name"));
}
public void testThroughInterface2() {
assertEquals(true, test("testImpl.foo"));
}
public void testMapAccessWithMethodCall() {
assertEquals("happyBar", test("funMap['foo'].happy()"));
}
public void testSimpleIfStatement() {
test("if (true) { System.out.println(\"test!\") } \n");
}
public void testBooleanOperator() {
assertEquals(true, test("foo.bar.woof == true"));
}
public void testBooleanOperator2() {
assertEquals(false, test("foo.bar.woof == false"));
}
public void testBooleanOperator3() {
assertEquals(true, test("foo.bar.woof== true"));
}
public void testBooleanOperator4() {
assertEquals(false, test("foo.bar.woof ==false"));
}
public void testBooleanOperator5() {
assertEquals(true, test("foo.bar.woof == true"));
}
public void testBooleanOperator6() {
assertEquals(false, test("foo.bar.woof==false"));
}
public void testBooleanOperatorWithCoercion() {
String expr = "foo.bar.woof==\"true\"";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(true);
context.addInput( "foo", Foo.class );
Serializable compiledExpression = compiler.compile(context);
boolean result = (Boolean) MVEL.executeExpression( compiledExpression, createTestMap() );
assertTrue( result );
expr = "foo.bar.woof==\"false\"";
compiler = new ExpressionCompiler(expr);
context = new ParserContext();
context.setStrictTypeEnforcement(true);
context.addInput( "foo", Foo.class );
compiledExpression = compiler.compile(context);
result = (Boolean) MVEL.executeExpression( compiledExpression, createTestMap() );
assertFalse( result );
}
public void testTextComparison() {
assertEquals(true, test("foo.bar.name == 'dog'"));
}
public void testNETextComparison() {
assertEquals(true, test("foo.bar.name != 'foo'"));
}
public void testChor() {
assertEquals("cat", test("a or b or c"));
}
public void testChorWithLiteral() {
assertEquals("fubar", test("a or 'fubar'"));
}
public void testNullCompare() {
assertEquals(true, test("c != null"));
}
public void testUninitializedInt() {
assertEquals(0, test("sarahl"));
}
public void testAnd() {
assertEquals(true, test("c != null && foo.bar.name == 'dog' && foo.bar.woof"));
}
public void testAnd2() {
assertEquals(true, test("c!=null&&foo.bar.name=='dog'&&foo.bar.woof"));
}
public void testMath() {
assertEquals(188.4d, test("pi * hour"));
}
public void testMath2() {
assertEquals(3, test("foo.number-1"));
}
public void testMath3() {
assertEquals((10d * 5d) * 2d / 3d, test("(10 * 5) * 2 / 3"));
}
public void testMath4() {
int val = (int) ((100d % 3d) * 2d - 1d / 1d + 8d + (5d * 2d));
assertEquals(val, test("(100 % 3) * 2 - 1 / 1 + 8 + (5 * 2)"));
}
public void testMath4a() {
String expression = "(100 % 90) * 20 - 15 / 16 + 80 + (50 * 21)";
System.out.println("Expression: " + expression);
assertEquals(((100d % 90d) * 20d - 15d / 16d + 80d + (50d * 21d)), MVEL.eval(expression));
}
public void testMath5() {
assertEquals(300.5 / 5.3 / 2.1 / 1.5, test("300.5 / 5.3 / 2.1 / 1.5"));
}
public void testMath5a() {
String expression = "300.5 / 5.3 / 2.1 / 1.5";
System.out.println("Expression: " + expression);
assertEquals(300.5 / 5.3 / 2.1 / 1.5, MVEL.eval(expression));
}
public void testMath6() {
int val = (300 * 5 + 1) + 100 / 2 * 2;
assertEquals(val, test("(300 * five + 1) + (100 / 2 * 2)"));
}
public void testMath7() {
int val = (int) ((100d % 3d) * 2d - 1d / 1d + 8d + (5d * 2d));
assertEquals(val, test("(100 % 3) * 2 - 1 / 1 + 8 + (5 * 2)"));
}
public void testMath8() {
double val = 5d * (100.56d * 30.1d);
assertEquals(val, test("5 * (100.56 * 30.1)"));
}
public void testPowerOf() {
assertEquals(25, test("5 ** 2"));
}
public void testWhileUsingImports() {
Map<String, Object> imports = new HashMap<String, Object>();
imports.put("ArrayList", java.util.ArrayList.class);
imports.put("List", java.util.List.class);
ParserContext context = new ParserContext(imports, null, "testfile");
ExpressionCompiler compiler = new ExpressionCompiler("List list = new ArrayList(); return (list == empty)");
assertTrue((Boolean) MVEL.executeExpression(compiler.compile(context), new DefaultLocalVariableResolverFactory()));
}
public void testComplexExpression() {
assertEquals("bar", test("a = 'foo'; b = 'bar'; c = 'jim'; list = {a,b,c}; list[1]"));
}
public void testComplexAnd() {
assertEquals(true, test("(pi * hour) > 0 && foo.happy() == 'happyBar'"));
}
public void testOperatorPrecedence() {
String ex = "_x_001 = 500.2; _x_002 = 200.8; _r_001 = 701; _r_001 == _x_001 + _x_002 || _x_001 == 500 + 0.1";
assertEquals(true, test(ex));
}
public void testOperatorPrecedence2() {
String ex = "_x_001 = 500.2; _x_002 = 200.8; _r_001 = 701; _r_001 == _x_001 + _x_002 && _x_001 == 500 + 0.2";
assertEquals(true, test(ex));
}
public void testOperatorPrecedence3() {
String ex = "_x_001 = 500.2; _x_002 = 200.9; _r_001 = 701; _r_001 == _x_001 + _x_002 && _x_001 == 500 + 0.2";
assertEquals(false, test(ex));
}
public void testOperatorPrecedence4() {
String ex = "_x_001 = 500.2; _x_002 = 200.9; _r_001 = 701; _r_001 == _x_001 + _x_002 || _x_001 == 500 + 0.2";
assertEquals(true, test(ex));
}
public void testOperatorPrecedence5() {
String ex = "_x_001 == _x_001 / 2 - _x_001 + _x_001 + _x_001 / 2 && _x_002 / 2 == _x_002 / 2";
Map vars = new HashMap();
vars.put("_x_001", 500.2);
vars.put("_x_002", 200.9);
vars.put("_r_001", 701);
ExpressionCompiler compiler = new ExpressionCompiler(ex);
Serializable s = compiler.compile();
assertEquals(true, MVEL.executeExpression(s, vars));
}
public void testShortPathExpression() {
assertEquals(null, MVEL.eval("3 > 4 && foo.toUC('test'); foo.register", new Base(), createTestMap()));
}
public void testShortPathExpression2() {
assertEquals(true, test("4 > 3 || foo.toUC('test')"));
}
public void testShortPathExpression4() {
assertEquals(true, test("4>3||foo.toUC('test')"));
}
public void testOrOperator() {
assertEquals(true, test("true||true"));
}
public void testOrOperator2() {
assertEquals(true, test("2 > 3 || 3 > 2"));
}
public void testOrOperator3() {
assertEquals(true, test("pi > 5 || pi > 6 || pi > 3"));
}
public void testShortPathExpression3() {
assertEquals(false, test("defnull != null && defnull.length() > 0"));
}
public void testModulus() {
assertEquals(38392 % 2,
test("38392 % 2"));
}
public void testLessThan() {
assertEquals(true, test("pi < 3.15"));
assertEquals(true, test("pi <= 3.14"));
assertEquals(false, test("pi > 3.14"));
assertEquals(true, test("pi >= 3.14"));
}
public void testMethodAccess() {
assertEquals("happyBar", test("foo.happy()"));
}
public void testMethodAccess2() {
assertEquals("FUBAR", test("foo.toUC( 'fubar' )"));
}
public void testMethodAccess3() {
assertEquals(true, test("equalityCheck(c, 'cat')"));
}
public void testMethodAccess4() {
assertEquals(null, test("readBack(null)"));
}
public void testMethodAccess5() {
assertEquals("nulltest", test("appendTwoStrings(null, 'test')"));
}
public void testMethodAccess6() {
assertEquals(true, test(" equalityCheck( c \n , \n 'cat' ) "));
}
public void testNegation() {
assertEquals(true, test("!fun && !fun"));
}
public void testNegation2() {
assertEquals(false, test("fun && !fun"));
}
public void testNegation3() {
assertEquals(true, test("!(fun && fun)"));
}
public void testNegation4() {
assertEquals(false, test("(fun && fun)"));
}
public void testNegation5() {
assertEquals(true, test("!false"));
}
public void testNegation6() {
assertEquals(false, test("!true"));
}
public void testNegation7() {
assertEquals(true, test("s = false; t = !s; t"));
}
public void testNegation8() {
assertEquals(true, test("s = false; t =! s; t"));
}
public void testMultiStatement() {
assertEquals(true, test("populate(); barfoo == 'sarah'"));
}
public void testAssignment() {
assertEquals(true, test("populate(); blahfoo = 'sarah'; blahfoo == 'sarah'"));
}
public void testAssignment2() {
assertEquals("sarah", test("populate(); blahfoo = barfoo"));
}
public void testAssignment3() {
assertEquals(java.lang.Integer.class, test("blah = 5").getClass());
}
public void testAssignment4() {
assertEquals(102, test("a = 100 + 1 + 1"));
}
public void testAssignment6() {
assertEquals("blip", test("array[zero] = array[zero+1]; array[zero]"));
}
public void testOr() {
assertEquals(true, test("fun || true"));
}
public void testLiteralPassThrough() {
assertEquals(true, test("true"));
}
public void testLiteralPassThrough2() {
assertEquals(false, test("false"));
}
public void testLiteralPassThrough3() {
assertEquals(null, test("null"));
}
public void testLiteralReduction1() {
assertEquals("foo", test("null or 'foo'"));
}
public void testRegEx() {
assertEquals(true, test("foo.bar.name ~= '[a-z].+'"));
}
public void testRegExNegate() {
assertEquals(false, test("!(foo.bar.name ~= '[a-z].+')"));
}
public void testRegEx2() {
assertEquals(true, test("foo.bar.name ~= '[a-z].+' && foo.bar.name != null"));
}
public void testRegEx3() {
assertEquals(true, test("foo.bar.name~='[a-z].+'&&foo.bar.name!=null"));
}
public void testBlank() {
assertEquals(true, test("'' == empty"));
}
public void testBlank2() {
assertEquals(true, test("BWAH == empty"));
}
public void testBooleanModeOnly2() {
assertEquals(false, (Object) DataConversion.convert(test("BWAH"), Boolean.class));
}
public void testBooleanModeOnly4() {
assertEquals(true, test("hour == (hour + 0)"));
}
public void testTernary() {
assertEquals("foobie", test("zero==0?'foobie':zero"));
}
public void testTernary2() {
assertEquals("blimpie", test("zero==1?'foobie':'blimpie'"));
}
public void testTernary3() {
assertEquals("foobiebarbie", test("zero==1?'foobie':'foobie'+'barbie'"));
}
public void testStrAppend() {
assertEquals("foobarcar", test("'foo' + 'bar' + 'car'"));
}
public void testStrAppend2() {
assertEquals("foobarcar1", test("'foobar' + 'car' + 1"));
}
public void testInstanceCheck1() {
assertEquals(true, test("c is java.lang.String"));
}
public void testInstanceCheck2() {
assertEquals(false, test("pi is java.lang.Integer"));
}
public void testInstanceCheck3() {
assertEquals(true, test("foo is org.mvel.tests.core.res.Foo"));
}
public void testBitwiseOr1() {
assertEquals(6, test("2|4"));
}
public void testBitwiseOr2() {
assertEquals(true, test("(2 | 1) > 0"));
}
public void testBitwiseOr3() {
assertEquals(true, test("(2|1) == 3"));
}
public void testBitwiseOr4() {
assertEquals(2 | 5, test("2|five"));
}
public void testBitwiseAnd1() {
assertEquals(2, test("2 & 3"));
}
public void testBitwiseAnd2() {
assertEquals(5 & 3, test("five & 3"));
}
public void testShiftLeft() {
assertEquals(4, test("2 << 1"));
}
public void testShiftLeft2() {
assertEquals(5 << 1, test("five << 1"));
}
public void testUnsignedShiftLeft() {
assertEquals(2, test("-2 <<< 0"));
}
// public void testUnsignedShiftLeft2() {
// assertEquals(5, test("(five - 10) <<< 0"));
// }
public void testShiftRight() {
assertEquals(128, test("256 >> 1"));
}
public void testShiftRight2() {
assertEquals(5 >> 1, test("five >> 1"));
}
public void testUnsignedShiftRight() {
assertEquals(-5 >>> 1, test("-5 >>> 1"));
}
public void testUnsignedShiftRight2() {
assertEquals(-5 >>> 1, test("(five - 10) >>> 1"));
}
public void testXOR() {
assertEquals(3, test("1 ^ 2"));
}
public void testXOR2() {
assertEquals(5 ^ 2, test("five ^ 2"));
}
public void testContains1() {
assertEquals(true, test("list contains 'Happy!'"));
}
public void testContains2() {
assertEquals(false, test("list contains 'Foobie'"));
}
public void testContains3() {
assertEquals(true, test("sentence contains 'fox'"));
}
public void testContains4() {
assertEquals(false, test("sentence contains 'mike'"));
}
public void testContains5() {
assertEquals(true, test("!(sentence contains 'mike')"));
}
public void testContains6() {
assertEquals(true, test("bwahbwah = 'mikebrock'; testVar10 = 'mike'; bwahbwah contains testVar10"));
}
public void testInvert() {
assertEquals(~10, test("~10"));
}
public void testInvert2() {
assertEquals(~(10 + 1), test("~(10 + 1)"));
}
public void testInvert3() {
assertEquals(~10 + (1 + ~50), test("~10 + (1 + ~50)"));
}
public void testListCreation2() {
assertTrue(test("[\"test\"]") instanceof List);
}
public void testListCreation3() {
assertTrue(test("[66]") instanceof List);
}
public void testListCreation4() {
List ar = (List) test("[ 66 , \"test\" ]");
assertEquals(2, ar.size());
assertEquals(66, ar.get(0));
assertEquals("test", ar.get(1));
}
public void testListCreationWithCall() {
assertEquals(1, test("[\"apple\"].size()"));
}
public void testArrayCreationWithLength() {
assertEquals(2, test("Array.getLength({'foo', 'bar'})"));
}
public void testEmptyList() {
assertTrue(test("[]") instanceof List);
}
public void testEmptyArray() {
assertTrue(((Object[]) test("{}")).length == 0);
}
public void testEmptyArray2() {
assertTrue(((Object[]) test("{ }")).length == 0);
}
public void testArrayCreation() {
assertEquals(0, test("arrayTest = {{1, 2, 3}, {2, 1, 0}}; arrayTest[1][2]"));
}
public void testMapCreation() {
assertEquals("sarah", test("map = ['mike':'sarah','tom':'jacquelin']; map['mike']"));
}
public void testMapCreation2() {
assertEquals("sarah", test("map = ['mike' :'sarah' ,'tom' :'jacquelin' ]; map['mike']"));
}
public void testMapCreation3() {
assertEquals("foo", test("map = [1 : 'foo']; map[1]"));
}
public void testProjectionSupport() {
assertEquals(true, test("(name in things)contains'Bob'"));
}
public void testProjectionSupport1() {
assertEquals(true, test("(name in things) contains 'Bob'"));
}
public void testProjectionSupport2() {
assertEquals(3, test("(name in things).size()"));
}
public void testSizeOnInlineArray() {
assertEquals(3, test("{1,2,3}.size()"));
}
public void testSimpleListCreation() {
test("['foo', 'bar', 'foobar', 'FOOBAR']");
}
public void testStaticMethodFromLiteral() {
assertEquals(String.class.getName(), test("String.valueOf(Class.forName('java.lang.String').getName())"));
}
public void testObjectInstantiation() {
test("new java.lang.String('foobie')");
}
public void testObjectInstantiationWithMethodCall() {
assertEquals("FOOBIE", test("new String('foobie') . toUpperCase()"));
}
public void testObjectInstantiation2() {
test("new String() is String");
}
public void testObjectInstantiation3() {
test("new java.text.SimpleDateFormat('yyyy').format(new java.util.Date(System.currentTimeMillis()))");
}
public void testArrayCoercion() {
assertEquals("gonk", test("funMethod( {'gonk', 'foo'} )"));
}
public void testArrayCoercion2() {
assertEquals(10, test("sum({2,2,2,2,2})"));
}
public void testMapAccess() {
assertEquals("dog", test("funMap['foo'].bar.name"));
}
public void testMapAccess2() {
assertEquals("dog", test("funMap.foo.bar.name"));
}
public void testSoundex() {
assertTrue((Boolean) test("'foobar' soundslike 'fubar'"));
}
public void testSoundex2() {
assertFalse((Boolean) test("'flexbar' soundslike 'fubar'"));
}
public void testSoundex3() {
assertEquals(true, test("(c soundslike 'kat')"));
}
public void testSoundex4() {
assertEquals(true, test("_xx1 = 'cat'; _xx2 = 'katt'; (_xx1 soundslike _xx2)"));
}
public void testSoundex5() {
assertEquals(true, test("_type = 'fubar';_type soundslike \"foobar\""));
}
public void testSimilarity1() {
assertEquals(0.6666667f, test("c strsim 'kat'"));
}
public void testThisReference() {
assertEquals(true, test("this") instanceof Base);
}
public void testThisReference2() {
assertEquals(true, test("this.funMap") instanceof Map);
}
public void testThisReference3() {
assertEquals(true, test("this is org.mvel.tests.core.res.Base"));
}
public void testThisReference4() {
assertEquals(true, test("this.funMap instanceof java.util.Map"));
}
public void testThisReference5() {
assertEquals(true, test("this.data == 'cat'"));
}
public void testThisReferenceInMethodCall() {
assertEquals(101, test("Integer.parseInt(this.number)"));
}
public void testThisReferenceInConstructor() {
assertEquals("101", test("new String(this.number)"));
}
// interpreted
public void testThisReferenceMapVirtualObjects() {
Map<String, String> map = new HashMap<String, String>();
map.put("foo", "bar");
VariableResolverFactory factory = new MapVariableResolverFactory(new HashMap<String, Object>());
factory.createVariable("this", map);
assertEquals(true, eval("this.foo == 'bar'", map, factory));
}
// compiled - reflective
public void testThisReferenceMapVirtualObjects1() {
// Create our root Map object
Map<String, String> map = new HashMap<String, String>();
map.put("foo", "bar");
VariableResolverFactory factory = new MapVariableResolverFactory(new HashMap<String, Object>());
factory.createVariable("this", map);
Serializable compiled = MVEL.compileExpression("this.foo == 'bar'");
OptimizerFactory.setDefaultOptimizer("reflective");
// Run test
assertEquals(true, MVEL.executeExpression(compiled, map, factory));
}
// compiled - asm
public void testThisReferenceMapVirtualObjects2() {
// Create our root Map object
Map<String, String> map = new HashMap<String, String>();
map.put("foo", "bar");
VariableResolverFactory factory = new MapVariableResolverFactory(new HashMap<String, Object>());
factory.createVariable("this", map);
// I think we can all figure this one out.
Serializable compiled = MVEL.compileExpression("this.foo == 'bar'");
if (!Boolean.getBoolean("mvel.disable.jit")) OptimizerFactory.setDefaultOptimizer("ASM");
// Run test
assertEquals(true, MVEL.executeExpression(compiled, map, factory));
}
public void testStringEscaping() {
assertEquals("\"Mike Brock\"", test("\"\\\"Mike Brock\\\"\""));
}
public void testStringEscaping2() {
assertEquals("MVEL's Parser is Fast", test("'MVEL\\'s Parser is Fast'"));
}
public void testEvalToBoolean() {
assertEquals(true, (boolean) evalToBoolean("true ", "true"));
assertEquals(true, (boolean) evalToBoolean("true ", "true"));
}
public void testCompiledMapStructures() {
Serializable compiled = compileExpression("['foo':'bar'] contains 'foo'");
executeExpression(compiled, null, null, Boolean.class);
}
public void testSubListInMap() {
assertEquals("pear", test("map = ['test' : 'poo', 'foo' : [c, 'pear']]; map['foo'][1]"));
}
public void testCompiledMethodCall() {
Serializable compiled = compileExpression("c.getClass()");
assertEquals(String.class, executeExpression(compiled, new Base(), createTestMap()));
}
public void testStaticNamespaceCall() {
assertEquals(java.util.ArrayList.class, test("java.util.ArrayList"));
}
public void testStaticNamespaceClassWithMethod() {
assertEquals("FooBar", test("java.lang.String.valueOf('FooBar')"));
}
public void testConstructor() {
assertEquals("foo", test("a = 'foobar'; new String(a.toCharArray(), 0, 3)"));
}
public void testStaticNamespaceClassWithField() {
assertEquals(Integer.MAX_VALUE, test("java.lang.Integer.MAX_VALUE"));
}
public void testStaticNamespaceClassWithField2() {
assertEquals(Integer.MAX_VALUE, test("Integer.MAX_VALUE"));
}
public void testStaticFieldAsMethodParm() {
assertEquals(String.valueOf(Integer.MAX_VALUE), test("String.valueOf(Integer.MAX_VALUE)"));
}
public void testEmptyIf() {
assertEquals(5, test("a = 5; if (a == 5) { }; return a;"));
}
public void testEmptyIf2() {
assertEquals(5, test("a=5;if(a==5){};return a;"));
}
public void testIf() {
assertEquals(10, test("if (5 > 4) { return 10; } else { return 5; }"));
}
public void testIf2() {
assertEquals(10, test("if (5 < 4) { return 5; } else { return 10; }"));
}
public void testIf3() {
assertEquals(10, test("if(5<4){return 5;}else{return 10;}"));
}
public void testIfAndElse() {
assertEquals(true, test("if (false) { return false; } else { return true; }"));
}
public void testIfAndElseif() {
assertEquals(true, test("if (false) { return false; } else if(100 < 50) { return false; } else if (10 > 5) return true;"));
}
public void testIfAndElseIfCondensedGrammar() {
assertEquals("Foo", test("if (false) return 'Bar'; else return 'Foo';"));
}
public void testForEach2() {
assertEquals(6, test("total = 0; a = {1,2,3}; foreach(item : a) { total += item }; total"));
}
public void testForEach3() {
assertEquals(true, test("a = {1,2,3}; foreach (i : a) { if (i == 1) { return true; } }"));
}
public void testForEach4() {
assertEquals("OneTwoThreeFour", test("a = {1,2,3,4}; builder = ''; foreach (i : a) {" +
" if (i == 1) { builder += 'One' } else if (i == 2) { builder += 'Two' } " +
"else if (i == 3) { builder += 'Three' } else { builder += 'Four' }" +
"}; builder;"));
}
public void testWith() {
assertEquals("OneTwo", test("with (foo) {aValue = 'One',bValue='Two'}; foo.aValue + foo.bValue;"));
}
public void testWith2() {
assertEquals("OneTwo", test(
"with (foo) { \n" +
"aValue = 'One', // this is a comment \n" +
"bValue='Two' // this is also a comment \n" +
"}; \n" +
"foo.aValue + foo.bValue;"));
}
public void testMagicArraySize() {
assertEquals(5, test("stringArray.size()"));
}
public void testMagicArraySize2() {
assertEquals(5, test("intArray.size()"));
}
public void testStaticVarAssignment() {
assertEquals("1", test("String mikeBrock = 1; mikeBrock"));
}
public void testImport() {
assertEquals(HashMap.class, test("import java.util.HashMap; HashMap;"));
}
public void testStaticImport() {
assertEquals(2.0, test("import_static java.lang.Math.sqrt; sqrt(4)"));
}
public void testFunctionPointer() {
assertEquals(2.0, test("squareRoot = java.lang.Math.sqrt; squareRoot(4)"));
}
public void testFunctionPointerAsParam() {
assertEquals("2.0", test("squareRoot = Math.sqrt; new String(String.valueOf(squareRoot(4)));"));
}
public void testFunctionPointerInAssignment() {
assertEquals(5.0, test("squareRoot = Math.sqrt; i = squareRoot(25); return i;"));
}
public void testIncrementOperator() {
assertEquals(2, test("x = 1; x++; x"));
}
public void testPreIncrementOperator() {
assertEquals(2, test("x = 1; ++x"));
}
public void testDecrementOperator() {
assertEquals(1, test("x = 2; x--; x"));
}
public void testPreDecrementOperator() {
assertEquals(1, test("x = 2; --x"));
}
public void testQualifiedStaticTyping() {
assertEquals(20, test("java.math.BigDecimal a = new java.math.BigDecimal( 10.0 ); java.math.BigDecimal b = new java.math.BigDecimal( 10.0 ); java.math.BigDecimal c = a + b; return c; "));
}
public void testUnQualifiedStaticTyping() {
CompiledExpression ce = (CompiledExpression) MVEL.compileExpression("import java.math.BigDecimal; BigDecimal a = new BigDecimal( 10.0 ); BigDecimal b = new BigDecimal( 10.0 ); BigDecimal c = a + b; return c; ");
System.out.println(DebugTools.decompile(ce));
assertEquals(20, testCompiledSimple("import java.math.BigDecimal; BigDecimal a = new BigDecimal( 10.0 ); BigDecimal b = new BigDecimal( 10.0 ); BigDecimal c = a + b; return c; ", new HashMap()));
}
public void testObjectCreation() {
assertEquals(6, test("new Integer( 6 )"));
}
public void testTernary4() {
assertEquals("<test>", test("true ? '<test>' : '<poo>'"));
}
public void testStringAsCollection() {
assertEquals('o', test("abc = 'foo'; abc[1]"));
}
public void testSubExpressionIndexer() {
assertEquals("bar", test("xx = new java.util.HashMap(); xx.put('foo', 'bar'); prop = 'foo'; xx[prop];"));
}
public void testCompileTimeLiteralReduction() {
assertEquals(1000, test("10 * 100"));
}
public void testInterfaceResolution() {
Serializable ex = MVEL.compileExpression("foo.collectionTest.size()");
Map map = createTestMap();
Foo foo = (Foo) map.get("foo");
foo.setCollectionTest(new HashSet());
Object result1 = MVEL.executeExpression(ex, foo, map);
foo.setCollectionTest(new ArrayList());
Object result2 = MVEL.executeExpression(ex, foo, map);
assertEquals(result1, result2);
}
/**
* Start collections framework based compliance tests
*/
public void testCreationOfSet() {
assertEquals("foo bar foo bar",
test("set = new java.util.LinkedHashSet(); " +
"set.add('foo');" +
"set.add('bar');" +
"output = '';" +
"foreach (item : set) {" +
"output = output + item + ' ';" +
"} " +
"foreach (item : set) {" +
"output = output + item + ' ';" +
"} " +
"output = output.trim();" +
"if (set.size() == 2) { return output; }"));
}
public void testCreationOfList() {
assertEquals(5, test("l = new java.util.LinkedList();" +
"l.add('fun');" +
"l.add('happy');" +
"l.add('fun');" +
"l.add('slide');" +
"l.add('crap');" +
"poo = new java.util.ArrayList(l);" +
"poo.size();"));
}
public void testMapOperations() {
assertEquals("poo5", test(
"l = new java.util.ArrayList();" +
"l.add('plop');" +
"l.add('poo');" +
"m = new java.util.HashMap();" +
"m.put('foo', l);" +
"m.put('cah', 'mah');" +
"m.put('bar', 'foo');" +
"m.put('sarah', 'mike');" +
"m.put('edgar', 'poe');" +
"" +
"if (m.edgar == 'poe') {" +
"return m.foo[1] + m.size();" +
"}"));
}
public void testStackOperations() {
assertEquals(10, test(
"stk = new java.util.Stack();" +
"stk.push(5);" +
"stk.push(5);" +
"stk.pop() + stk.pop();"
));
}
public void testSystemOutPrint() {
test("a = 0;\r\nSystem.out.println('This is a test');");
}
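// Debugger/breakpoint tests: these compile with debug symbols against a synthetic source
// file name, register line breakpoints via MVELRuntime, and count DEBUG_SYMBOL markers in
// the decompiled output.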
public void testBreakpoints() {
ExpressionCompiler compiler = new ExpressionCompiler("a = 5;\nb = 5;\n\nif (a == b) {\n\nSystem.out.println('Good');\nreturn a + b;\n}\n");
System.out.println("-------\n" + compiler.getExpression() + "\n-------\n");
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test.mv", 7);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
return 0;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
assertEquals(10, MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testBreakpoints2() {
ExpressionCompiler compiler = new ExpressionCompiler("System.out.println('test the debugger');\n a = 0;");
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
System.out.println(DebugTools.decompile(compiled));
}
public void testBreakpoints3() {
String expr = "System.out.println( \"a1\" );\n" +
"System.out.println( \"a2\" );\n" +
"System.out.println( \"a3\" );\n" +
"System.out.println( \"a4\" );\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.addImport("System", System.class);
context.setStrictTypeEnforcement(true);
context.setDebugSymbols(true);
context.setSourceFile("mysource");
Serializable compiledExpression = compiler.compile(context);
String s = org.mvel.debug.DebugTools.decompile(compiledExpression);
System.out.println("output: " + s);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(4, count);
}
public void testBreakpointsAcrossWith() {
String line1 = "System.out.println( \"a1\" );\n";
String line2 = "c = new Cheese();\n";
String line3 = "with ( c ) { type = 'cheddar',\n" +
" price = 10 };\n";
String line4 = "System.out.println( \"a1\" );\n";
String expr = line1 + line2 + line3 + line4;
System.out.println(expr);
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.addImport("System", System.class);
context.addImport("Cheese", Cheese.class);
context.setStrictTypeEnforcement(true);
context.setDebugSymbols(true);
context.setSourceFile("mysource");
Serializable compiledExpression = compiler.compile(context);
String s = org.mvel.debug.DebugTools.decompile(compiledExpression);
System.out.println("output: " + s);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(5, count);
}
public void testBreakpointsAcrossComments() {
String expression = "/** This is a comment\n" + // 1
" * Second comment line\n" + // 2
" * Third Comment Line\n" + // 3
" */\n" + // 4
"System.out.println('4');\n" + // 5
"System.out.println('5');\n" + // 6
"a = 0;\n" + // 7
"b = 1;\n" + // 8
"a + b"; // 9
ExpressionCompiler compiler = new ExpressionCompiler(expression);
compiler.setDebugSymbols(true);
System.out.println("Expression:\n------------");
System.out.println(expression);
System.out.println("------------");
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test2.mv");
CompiledExpression compiled = compiler.compile(ctx);
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test2.mv", 9);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint Encountered [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
System.out.println("vars:" + frame.getFactory().getKnownVariables());
System.out.println("Resume Execution");
return 0;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
assertEquals(1, MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testBreakpointsAcrossComments2() {
ExpressionCompiler compiler = new ExpressionCompiler(
"// This is a comment\n" + // 1
"//Second comment line\n" + // 2
"//Third Comment Line\n" + // 3
"\n" + // 4
"//Test\n" + // 5
"System.out.println('4');\n" + // 6
"//System.out.println('5'); \n" + // 7
"a = 0;\n" + // 8
"b = 1;\n" + // 9
" a + b"); // 10
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test2.mv");
CompiledExpression compiled = compiler.compile(ctx);
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test2.mv", 6);
MVELRuntime.registerBreakpoint("test2.mv", 8);
MVELRuntime.registerBreakpoint("test2.mv", 9);
MVELRuntime.registerBreakpoint("test2.mv", 10);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
return 0;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
assertEquals(1, MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testBreakpoints4() {
String expression = "System.out.println('foo');\n" +
"a = new Foo();\n" +
"update (a) { name = 'bar' };\n" +
"System.out.println('name:' + a.name);\n" +
"return a.name;";
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
Map<String, Macro> macros = new HashMap<String, Macro>();
interceptors.put("Update", new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
((WithNode) node).getNestedStatement().getValue(null,
factory);
System.out.println("fired update interceptor -- before");
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
System.out.println("fired update interceptor -- after");
return 0;
}
});
macros.put("update", new Macro() {
public String doMacro() {
return "@Update with";
}
});
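// parseMacros rewrites "update (a) { ... }" into "@Update with (a) { ... }", so the interceptor fires around the with-block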
expression = parseMacros(expression, macros);
ExpressionCompiler compiler = new ExpressionCompiler(expression);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test2.mv");
ctx.addImport("Foo", Foo.class);
ctx.setInterceptors(interceptors);
CompiledExpression compiled = compiler.compile(ctx);
System.out.println("\nExpression:------------");
System.out.println(expression);
System.out.println("------------");
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test2.mv", 3);
MVELRuntime.registerBreakpoint("test2.mv", 4);
MVELRuntime.registerBreakpoint("test2.mv", 5);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
return 0;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
assertEquals("bar", MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testBreakpoints5() {
String expression = "System.out.println('foo');\r\n" +
"a = new Foo();\r\n" +
"a.name = 'bar';\r\n" +
"foo.happy();\r\n" +
"System.out.println( 'name:' + a.name ); \r\n" +
"System.out.println( 'name:' + a.name ); \r\n" +
"System.out.println( 'name:' + a.name ); \r\n" +
"return a.name;";
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
Map<String, Macro> macros = new HashMap<String, Macro>();
interceptors.put("Update", new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
((WithNode) node).getNestedStatement().getValue(null,
factory);
System.out.println("fired update interceptor -- before");
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
System.out.println("fired update interceptor -- after");
return 0;
}
});
macros.put("update", new Macro() {
public String doMacro() {
return "@Update with";
}
});
expression = parseMacros(expression, macros);
ExpressionCompiler compiler = new ExpressionCompiler(expression);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test2.mv");
ctx.addImport("Foo", Foo.class);
ctx.setInterceptors(interceptors);
CompiledExpression compiled = compiler.compile(ctx);
System.out.println("\nExpression:------------");
System.out.println(expression);
System.out.println("------------");
System.out.println(DebugTools.decompile(compiled));
MVELRuntime.registerBreakpoint("test2.mv", 1);
Debugger testDebugger = new Debugger() {
public int onBreak(Frame frame) {
System.out.println("Breakpoint [source:" + frame.getSourceName() + "; line:" + frame.getLineNumber() + "]");
return Debugger.STEP_OVER;
}
};
MVELRuntime.setThreadDebugger(testDebugger);
System.out.println("\n==RUN==\n");
assertEquals("bar", MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(createTestMap())));
}
public void testDebugSymbolsWithWindowsLineEndings() throws Exception {
String expr = " System.out.println( \"a1\" );\r\n" +
" System.out.println( \"a2\" );\r\n" +
" System.out.println( \"a3\" );\r\n" +
" System.out.println( \"a4\" );\r\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.setDebugSymbols(true);
ctx.setSourceFile("mysource");
Serializable compiledExpression = compiler.compile(ctx);
String s = org.mvel.debug.DebugTools.decompile(compiledExpression);
System.out.println(s);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(4, count);
}
public void testDebugSymbolsWithUnixLineEndings() throws Exception {
String expr = " System.out.println( \"a1\" );\n" +
" System.out.println( \"a2\" );\n" +
" System.out.println( \"a3\" );\n" +
" System.out.println( \"a4\" );\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.setDebugSymbols(true);
ctx.setSourceFile("mysource");
Serializable compiledExpression = compiler.compile(ctx);
String s = org.mvel.debug.DebugTools.decompile(compiledExpression);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(4, count);
}
public void testDebugSymbolsWithMixedLineEndings() throws Exception {
String expr = " System.out.println( \"a1\" );\n" +
" System.out.println( \"a2\" );\r\n" +
" System.out.println( \"a3\" );\n" +
" System.out.println( \"a4\" );\r\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.setDebugSymbols(true);
ctx.setSourceFile("mysource");
Serializable compiledExpression = compiler.compile(ctx);
String s = org.mvel.debug.DebugTools.decompile(compiledExpression);
System.out.println(s);
int fromIndex = 0;
int count = 0;
while ((fromIndex = s.indexOf("DEBUG_SYMBOL", fromIndex + 1)) > -1) {
count++;
}
assertEquals(4, count);
}
public void testReflectionCache() {
assertEquals("happyBar", test("foo.happy(); foo.bar.happy()"));
}
public void testVarInputs() {
ExpressionCompiler compiler = new ExpressionCompiler("test != foo && bo.addSomething(trouble); String bleh = foo; twa = bleh;");
compiler.compile();
ParserContext pCtx = compiler.getParserContextState();
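// "inputs" are identifiers the expression expects to be supplied externally; "variables" are those it declares or assigns itself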
assertEquals(4, pCtx.getInputs().size());
assertTrue(pCtx.getInputs().containsKey("test"));
assertTrue(pCtx.getInputs().containsKey("foo"));
assertTrue(pCtx.getInputs().containsKey("bo"));
assertTrue(pCtx.getInputs().containsKey("trouble"));
assertEquals(2, pCtx.getVariables().size());
assertTrue(pCtx.getVariables().containsKey("bleh"));
assertTrue(pCtx.getVariables().containsKey("twa"));
assertEquals(String.class, pCtx.getVarOrInputType("bleh"));
}
public void testVarInputs2() {
ExpressionCompiler compiler = new ExpressionCompiler("test != foo && bo.addSomething(trouble); String bleh = foo; twa = bleh;");
ParserContext ctx = new ParserContext();
ctx.setRetainParserState(true);
compiler.compile(ctx);
System.out.println(ctx.getVarOrInputType("bleh"));
}
public void testVarInputs3() {
ExpressionCompiler compiler = new ExpressionCompiler("addresses['home'].street");
compiler.compile();
assertFalse(compiler.getParserContextState().getInputs().keySet().contains("home"));
}
public void testVarInputs4() {
ExpressionCompiler compiler = new ExpressionCompiler("System.out.println( message );");
compiler.compile();
assertTrue(compiler.getParserContextState().getInputs().keySet().contains("message"));
}
public void testAnalyzer() {
ExpressionCompiler compiler = new ExpressionCompiler("order.id == 10");
compiler.compile();
for (String input : compiler.getParserContextState().getInputs().keySet()) {
System.out.println("input>" + input);
}
assertEquals(1, compiler.getParserContextState().getInputs().size());
assertTrue(compiler.getParserContextState().getInputs().containsKey("order"));
}
public void testClassImportViaFactory() {
MapVariableResolverFactory mvf = new MapVariableResolverFactory(createTestMap());
ClassImportResolverFactory classes = new ClassImportResolverFactory();
classes.addClass(HashMap.class);
ResolverTools.appendFactory(mvf, classes);
Serializable compiled = compileExpression("HashMap map = new HashMap()", classes.getImportedClasses());
assertTrue(executeExpression(compiled, mvf) instanceof HashMap);
}
public void testStaticClassImportViaFactory() {
MapVariableResolverFactory mvf = new MapVariableResolverFactory(createTestMap());
ClassImportResolverFactory classes = new ClassImportResolverFactory();
classes.addClass(Person.class);
ResolverTools.appendFactory(mvf, classes);
Serializable compiled = compileExpression("p = new Person('tom'); return p.name;", classes.getImportedClasses());
assertEquals("tom", executeExpression(compiled, mvf));
}
public void testStaticClassImportViaFactoryAndWithModification() {
MapVariableResolverFactory mvf = new MapVariableResolverFactory(createTestMap());
ClassImportResolverFactory classes = new ClassImportResolverFactory();
classes.addClass(Person.class);
ResolverTools.appendFactory(mvf, classes);
Serializable compiled = compileExpression("p = new Person('tom'); p.age = 20; with( p ) { age = p.age + 1 }; return p.age;", classes.getImportedClasses());
assertEquals(21, executeExpression(compiled, mvf));
}
public void testCheeseConstructor() {
MapVariableResolverFactory mvf = new MapVariableResolverFactory(createTestMap());
ClassImportResolverFactory classes = new ClassImportResolverFactory();
classes.addClass(Cheese.class);
ResolverTools.appendFactory(mvf, classes);
Serializable compiled = compileExpression("cheese = new Cheese(\"cheddar\", 15);", classes.getImportedClasses());
assertTrue(executeExpression(compiled, mvf) instanceof Cheese);
}
public void testInterceptors() {
Interceptor testInterceptor = new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
System.out.println("BEFORE Node: " + node.getName());
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
System.out.println("AFTER Node: " + node.getName());
return 0;
}
};
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
interceptors.put("test", testInterceptor);
Serializable compiled = compileExpression("@test System.out.println('MIDDLE');", null, interceptors);
executeExpression(compiled);
}
public void testMacroSupport() {
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("foo", new Foo());
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
Map<String, Macro> macros = new HashMap<String, Macro>();
interceptors.put("Modify", new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
((WithNode) node).getNestedStatement().getValue(null,
factory);
factory.createVariable("mod", "FOOBAR!");
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
return 0;
}
});
macros.put("modify", new Macro() {
public String doMacro() {
return "@Modify with";
}
});
ExpressionCompiler compiler = new ExpressionCompiler(parseMacros("modify (foo) { aValue = 'poo' }; mod", macros));
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext(null, interceptors, null);
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
assertEquals("FOOBAR!", MVEL.executeExpression(compiled, null, vars));
}
public void testMacroSupportWithDebugging() {
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("foo", new Foo());
Map<String, Interceptor> interceptors = new HashMap<String, Interceptor>();
Map<String, Macro> macros = new HashMap<String, Macro>();
interceptors.put("Modify", new Interceptor() {
public int doBefore(ASTNode node, VariableResolverFactory factory) {
((WithNode) node).getNestedStatement().getValue(null,
factory);
factory.createVariable("mod", "FOOBAR!");
return 0;
}
public int doAfter(Object val, ASTNode node, VariableResolverFactory factory) {
return 0;
}
});
macros.put("modify", new Macro() {
public String doMacro() {
return "@Modify with";
}
});
ExpressionCompiler compiler = new ExpressionCompiler(
parseMacros(
"System.out.println('hello');\n" +
"System.out.println('bye');\n" +
"modify (foo) { aValue = 'poo', \n" +
" aValue = 'poo' };\n mod", macros)
);
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext(null, interceptors, null);
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
MVELRuntime.setThreadDebugger(new Debugger() {
public int onBreak(Frame frame) {
System.out.println(frame.getSourceName() + ":" + frame.getLineNumber());
return Debugger.STEP;
}
});
MVELRuntime.registerBreakpoint("test.mv", 3);
System.out.println(DebugTools.decompile(compiled));
assertEquals("FOOBAR!", MVEL.executeDebugger(compiled, null, new MapVariableResolverFactory(vars)));
}
public void testExecuteCoercionTwice() {
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("foo", new Foo());
vars.put("$value", new Long(5));
ExpressionCompiler compiler = new ExpressionCompiler("with (foo) { countTest = $value };");
compiler.setDebugSymbols(true);
ParserContext ctx = new ParserContext();
ctx.setSourceFile("test.mv");
CompiledExpression compiled = compiler.compile(ctx);
MVEL.executeExpression(compiled, null, vars);
MVEL.executeExpression(compiled, null, vars);
}
public void testComments() {
assertEquals(10, test("// This is a comment\n5 + 5"));
}
public void testComments2() {
assertEquals(20, test("10 + 10; // This is a comment"));
}
public void testComments3() {
assertEquals(30, test("/* This is a test of\r\n" +
"MVEL's support for\r\n" +
"multi-line comments\r\n" +
"*/\r\n 15 + 15"));
}
public void testComments4() {
assertEquals(((10 + 20) * 2) - 10, test("/** This is a fun test script **/\r\n" +
"a = 10;\r\n" +
"/**\r\n" +
"* Here is a useful variable\r\n" +
"*/\r\n" +
"b = 20; // set b to '20'\r\n" +
"return ((a + b) * 2) - 10;\r\n" +
"// last comment\n"));
}
public void testSubtractNoSpace1() {
assertEquals(59, test("hour-1"));
}
public void testStrictTypingCompilation() {
ExpressionCompiler compiler = new ExpressionCompiler("a.foo;\nb.foo;\n x = 5");
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
try {
compiler.compile(ctx);
}
catch (CompileException e) {
e.printStackTrace();
assertEquals(2, e.getErrors().size());
return;
}
fail("compilation should have failed with a CompileException");
}
public void testStrictStaticMethodCall() {
ExpressionCompiler compiler = new ExpressionCompiler("Bar.staticMethod()");
ParserContext ctx = new ParserContext();
ctx.addImport("Bar", Bar.class);
ctx.setStrictTypeEnforcement(true);
Serializable s = compiler.compile(ctx);
DebugTools.decompile(s);
assertEquals(1, executeExpression(s));
}
public void testStrictTypingCompilation2() throws Exception {
ParserContext ctx = new ParserContext();
//noinspection RedundantArrayCreation
ctx.addImport("getRuntime", new MethodStub(Runtime.class.getMethod("getRuntime", new Class[]{})));
ctx.setStrictTypeEnforcement(true);
ExpressionCompiler compiler = new ExpressionCompiler("getRuntime()");
StaticMethodImportResolverFactory si = new StaticMethodImportResolverFactory(ctx);
Serializable expression = compiler.compile(ctx);
serializationTest(expression);
assertTrue(executeExpression(expression, si) instanceof Runtime);
}
public void testStrictTypingCompilation3() throws NoSuchMethodException {
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ExpressionCompiler compiler =
new ExpressionCompiler("message='Hello';b=7;\nSystem.out.println(message + ';' + b);\n" +
"System.out.println(message + ';' + b); b");
assertEquals(7, executeExpression(compiler.compile(ctx), new DefaultLocalVariableResolverFactory()));
}
public void testStrictTypingCompilation4() throws NoSuchMethodException {
ParserContext ctx = new ParserContext();
ctx.addImport(Foo.class);
ctx.setStrictTypeEnforcement(true);
ExpressionCompiler compiler =
new ExpressionCompiler("x_a = new Foo()");
compiler.compile(ctx);
assertEquals(Foo.class, ctx.getVariables().get("x_a"));
}
public void testProvidedExternalTypes() {
ExpressionCompiler compiler = new ExpressionCompiler("foo.bar");
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.addInput("foo", Foo.class);
compiler.compile(ctx);
}
public void testEqualityRegression() {
ExpressionCompiler compiler = new ExpressionCompiler("price == (new Integer( 5 ) + 5 ) ");
compiler.compile();
}
public void testEvaluationRegression() {
ExpressionCompiler compiler = new ExpressionCompiler("(p.age * 2)");
compiler.compile();
assertTrue(compiler.getParserContextState().getInputs().containsKey("p"));
}
public void testAssignmentRegression() {
ExpressionCompiler compiler = new ExpressionCompiler("total = total + $cheese.price");
compiler.compile();
}
public void testTypeRegression() {
ExpressionCompiler compiler = new ExpressionCompiler("total = 0");
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
compiler.compile(ctx);
assertEquals(Integer.class,
compiler.getParserContextState().getVarOrInputType("total"));
}
public void testDateComparison() {
// map.put("dt1", new Date(currentTimeMillis() - 100000));
// map.put("dt2", new Date(currentTimeMillis()));
assertTrue((Boolean) test("dt1 < dt2"));
}
public void testDynamicDeop() {
Serializable s = MVEL.compileExpression("name");
assertEquals("dog", MVEL.executeExpression(s, new Foo()));
assertEquals("dog", MVEL.executeExpression(s, new Foo().getBar()));
}
public void testVirtProperty() {
// OptimizerFactory.setDefaultOptimizer("ASM");
Map<String, Object> testMap = new HashMap<String, Object>();
testMap.put("test", "foo");
Map<String, Object> vars = new HashMap<String, Object>();
vars.put("mp", testMap);
assertEquals("bar", MVEL.executeExpression(compileExpression("mp.test = 'bar'; mp.test"), vars));
}
public void testMapPropertyCreateCondensed() {
assertEquals("foo", test("map = new java.util.HashMap(); map['test'] = 'foo'; map['test'];"));
}
public void testClassLiteral() {
assertEquals(String.class, test("java.lang.String"));
}
public void testDeepMethod() {
assertEquals(false, test("foo.bar.testList.add(new String()); foo.bar.testList == empty"));
}
public void testArrayAccessorAssign() {
assertEquals("foo", test("a = {'f00', 'bar'}; a[0] = 'foo'; a[0]"));
}
public void testListAccessorAssign() {
assertEquals("bar", test("a = new java.util.ArrayList(); a.add('foo'); a.add('BAR'); a[1] = 'bar'; a[1]"));
}
public void testBracketInString() {
test("System.out.println('1)your guess was:');");
}
public void testNesting() {
assertEquals("foo", test("new String(new String(new String(\"foo\")));"));
}
public void testDeepPropertyAdd() {
assertEquals(10, test("foo.countTest+ 10"));
}
public void testDeepAssignmentIncrement() {
assertEquals(true, test("foo.countTest += 5; if (foo.countTest == 5) { foo.countTest = 0; return true; } else { foo.countTest = 0; return false; }"));
}
public void testDeepAssignmentWithBlock() {
assertEquals(true, test("with (foo) { countTest += 5 }; if (foo.countTest == 5) { foo.countTest = 0; return true; } else { foo.countTest = 0; return false; }"));
}
public void testTypeCast() {
assertEquals("10", test("(String) 10"));
}
public void testMapAccessSemantics() {
Map<String, Object> outermap = new HashMap<String, Object>();
Map<String, Object> innermap = new HashMap<String, Object>();
innermap.put("test", "foo");
outermap.put("innermap", innermap);
assertEquals("foo", testCompiledSimple("innermap['test']", outermap, null));
}
public void testMapBindingSemantics() {
Map<String, Object> outermap = new HashMap<String, Object>();
Map<String, Object> innermap = new HashMap<String, Object>();
innermap.put("test", "foo");
outermap.put("innermap", innermap);
MVEL.setProperty(outermap, "innermap['test']", "bar");
assertEquals("bar", testCompiledSimple("innermap['test']", outermap, null));
}
public void testMapNestedInsideList() {
ParserContext ctx = new ParserContext();
ctx.addImport("User", User.class);
ExpressionCompiler compiler = new ExpressionCompiler("users = [ 'darth' : new User('Darth', 'Vadar'),\n'bobba' : new User('Bobba', 'Feta') ]; [ users.get('darth'), users.get('bobba') ]");
Serializable s = compiler.compile(ctx);
List list = (List) MVEL.executeExpression(s);
User user = (User) list.get(0);
assertEquals("Darth", user.getFirstName());
user = (User) list.get(1);
assertEquals("Bobba", user.getFirstName());
compiler = new ExpressionCompiler("users = [ 'darth' : new User('Darth', 'Vadar'),\n'bobba' : new User('Bobba', 'Feta') ]; [ users['darth'], users['bobba'] ]");
s = compiler.compile(ctx);
list = (List) MVEL.executeExpression(s);
user = (User) list.get(0);
assertEquals("Darth", user.getFirstName());
user = (User) list.get(1);
assertEquals("Bobba", user.getFirstName());
}
public void testListNestedInsideList() {
ParserContext ctx = new ParserContext();
ctx.addImport("User", User.class);
ExpressionCompiler compiler = new ExpressionCompiler("users = [ new User('Darth', 'Vadar'), new User('Bobba', 'Feta') ]; [ users.get( 0 ), users.get( 1 ) ]");
Serializable s = compiler.compile(ctx);
List list = (List) MVEL.executeExpression(s);
User user = (User) list.get(0);
assertEquals("Darth", user.getFirstName());
user = (User) list.get(1);
assertEquals("Bobba", user.getFirstName());
compiler = new ExpressionCompiler("users = [ new User('Darth', 'Vadar'), new User('Bobba', 'Feta') ]; [ users[0], users[1] ]");
s = compiler.compile(ctx);
list = (List) MVEL.executeExpression(s);
user = (User) list.get(0);
assertEquals("Darth", user.getFirstName());
user = (User) list.get(1);
assertEquals("Bobba", user.getFirstName());
}
public void testSetSemantics() {
Bar bar = new Bar();
Foo foo = new Foo();
assertEquals("dog", MVEL.getProperty("name", bar));
assertEquals("dog", MVEL.getProperty("name", foo));
}
public void testMapBindingSemantics2() {
Map<String, Object> outermap = new HashMap<String, Object>();
Map<String, Object> innermap = new HashMap<String, Object>();
innermap.put("test", "foo");
outermap.put("innermap", innermap);
Serializable s = MVEL.compileSetExpression("innermap['test']");
MVEL.executeSetExpression(s, outermap, "bar");
assertEquals("bar", testCompiledSimple("innermap['test']", outermap, null));
}
public void testDynamicImports() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("java.util");
ExpressionCompiler compiler = new ExpressionCompiler("HashMap");
Serializable s = compiler.compile(ctx);
assertEquals(HashMap.class, MVEL.executeExpression(s));
compiler = new ExpressionCompiler("map = new HashMap(); map.size()");
s = compiler.compile(ctx);
assertEquals(0, MVEL.executeExpression(s, new DefaultLocalVariableResolverFactory()));
}
public void testDynamicImportsInList() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ExpressionCompiler compiler = new ExpressionCompiler("[ new User('Bobba', 'Feta') ]");
Serializable s = compiler.compile(ctx);
List list = (List) MVEL.executeExpression(s);
User user = (User) list.get(0);
assertEquals("Bobba", user.getFirstName());
}
public void testDynamicImportsInMap() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ExpressionCompiler compiler = new ExpressionCompiler("[ 'bobba' : new User('Bobba', 'Feta') ]");
Serializable s = compiler.compile(ctx);
Map map = (Map) MVEL.executeExpression(s);
User user = (User) map.get("bobba");
assertEquals("Bobba", user.getFirstName());
}
public void testDynamicImportsOnNestedExpressions() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ExpressionCompiler compiler = new ExpressionCompiler("new Cheesery(\"bobbo\", new Cheese(\"cheddar\", 15))");
Serializable s = compiler.compile(ctx);
Cheesery p1 = new Cheesery("bobbo", new Cheese("cheddar", 15));
Cheesery p2 = (Cheesery) MVEL.executeExpression(s, new DefaultLocalVariableResolverFactory());
assertEquals(p1, p2);
}
public void testDynamicImportsWithNullConstructorParam() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ExpressionCompiler compiler = new ExpressionCompiler("new Cheesery(\"bobbo\", null)");
Serializable s = compiler.compile(ctx);
Cheesery p1 = new Cheesery("bobbo", null);
Cheesery p2 = (Cheesery) MVEL.executeExpression(s, new DefaultLocalVariableResolverFactory());
assertEquals(p1, p2);
}
public void testDynamicImportsWithIdentifierSameAsClassWithDiffCase() {
ParserContext ctx = new ParserContext();
ctx.addPackageImport("org.mvel.tests.core.res");
ctx.setStrictTypeEnforcement(false);
ExpressionCompiler compiler = new ExpressionCompiler("bar.add(\"hello\")");
compiler.compile(ctx);
}
public void testTypedAssignment() {
assertEquals("foobar", test("java.util.Map map = new java.util.HashMap(); map.put('conan', 'foobar'); map['conan'];"));
}
public void testFQCNwithStaticInList() {
assertEquals(Integer.MIN_VALUE, test("list = [java.lang.Integer.MIN_VALUE]; list[0]"));
}
public void testPrecedenceOrder() {
assertTrue((Boolean) test("5 > 6 && 2 < 1 || 10 > 9"));
}
public void testPrecedenceOrder1() {
String ex = "50 > 60 && 20 < 10 || 100 > 90";
System.out.println("Expression: " + ex);
assertTrue((Boolean) MVEL.eval(ex));
}
@SuppressWarnings({"unchecked"})
public void testDifferentImplSameCompile() {
Serializable compiled = compileExpression("a.funMap.hello");
Map testMap = new HashMap();
for (int i = 0; i < 100; i++) {
Base b = new Base();
b.funMap.put("hello", "dog");
testMap.put("a", b);
assertEquals("dog", executeExpression(compiled, testMap));
b = new Base();
b.funMap.put("hello", "cat");
testMap.put("a", b);
assertEquals("cat", executeExpression(compiled, testMap));
}
}
@SuppressWarnings({"unchecked"})
public void testInterfaceMethodCallWithSpace() {
Serializable compiled = compileExpression("drools.retract (cheese)");
Map map = new HashMap();
DefaultKnowledgeHelper helper = new DefaultKnowledgeHelper();
map.put("drools", helper);
Cheese cheese = new Cheese("stilton", 15);
map.put("cheese", cheese);
executeExpression(compiled, map);
assertSame(cheese, helper.retracted.get(0));
}
@SuppressWarnings({"unchecked"})
public void testInterfaceMethodCallWithMacro() {
Map macros = new HashMap(1);
macros.put("retract",
new Macro() {
public String doMacro() {
return "drools.retract";
}
});
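// after macro expansion, "retract(cheese)" becomes "drools.retract(cheese)"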
Serializable compiled = compileExpression(parseMacros("retract(cheese)", macros));
Map map = new HashMap();
DefaultKnowledgeHelper helper = new DefaultKnowledgeHelper();
map.put("drools", helper);
Cheese cheese = new Cheese("stilton", 15);
map.put("cheese", cheese);
executeExpression(compiled, map);
assertSame(cheese, helper.retracted.get(0));
}
@SuppressWarnings({"UnnecessaryBoxing"})
public void testToList() {
String text = "misc.toList(foo.bar.name, 'hello', 42, ['key1' : 'value1', c : [ foo.bar.age, 'car', 42 ]], [42, [c : 'value1']] )";
List list = (List) test(text);
assertSame("dog", list.get(0));
assertEquals("hello", list.get(1));
assertEquals(new Integer(42), list.get(2));
Map map = (Map) list.get(3);
assertEquals("value1", map.get("key1"));
List nestedList = (List) map.get("cat");
assertEquals(14, nestedList.get(0));
assertEquals("car", nestedList.get(1));
assertEquals(42, nestedList.get(2));
nestedList = (List) list.get(4);
assertEquals(42, nestedList.get(0));
map = (Map) nestedList.get(1);
assertEquals("value1", map.get("cat"));
}
@SuppressWarnings({"UnnecessaryBoxing"})
public void testToListStrictMode() {
String text = "misc.toList(foo.bar.name, 'hello', 42, ['key1' : 'value1', c : [ foo.bar.age, 'car', 42 ]], [42, [c : 'value1']] )";
ParserContext ctx = new ParserContext();
ctx.addInput("misc", MiscTestClass.class);
ctx.addInput("foo", Foo.class);
ctx.addInput("c", String.class);
ctx.setStrictTypeEnforcement(true);
ExpressionCompiler compiler = new ExpressionCompiler(text);
Serializable expr = compiler.compile(ctx);
List list = (List) executeExpression(expr, createTestMap());
assertSame("dog", list.get(0));
assertEquals("hello", list.get(1));
assertEquals(new Integer(42), list.get(2));
Map map = (Map) list.get(3);
assertEquals("value1", map.get("key1"));
List nestedList = (List) map.get("cat");
assertEquals(14, nestedList.get(0));
assertEquals("car", nestedList.get(1));
assertEquals(42, nestedList.get(2));
nestedList = (List) list.get(4);
assertEquals(42, nestedList.get(0));
map = (Map) nestedList.get(1);
assertEquals("value1", map.get("cat"));
}
public void testParsingStability1() {
assertEquals(true, test("( order.number == 1 || order.number == ( 1+1) || order.number == $id )"));
}
public void testParsingStability2() {
ExpressionCompiler compiler = new ExpressionCompiler("( dim.height == 1 || dim.height == ( 1+1) || dim.height == x )");
Map<String, Object> imports = new HashMap<String, Object>();
imports.put("java.awt.Dimension", Dimension.class);
final ParserContext parserContext = new ParserContext(imports,
null,
"sourceFile");
parserContext.setStrictTypeEnforcement(false);
compiler.compile(parserContext);
}
public void testParsingStability3() {
assertEquals(false, test("!( [\"X\", \"Y\"] contains \"Y\" )"));
}
public void testParsingStability4() {
assertEquals(true, test("vv=\"Edson\"; !(vv ~= \"Mark\")"));
}
public void testConcatWithLineBreaks() {
ExpressionCompiler parser = new ExpressionCompiler("\"foo\"+\n\"bar\"");
ParserContext ctx = new ParserContext();
ctx.setDebugSymbols(true);
ctx.setSourceFile("source.mv");
Serializable c = parser.compile(ctx);
assertEquals("foobar", MVEL.executeExpression(c));
}
/**
* Community provided test cases
*/
@SuppressWarnings({"unchecked"})
public void testCalculateAge() {
Calendar c1 = Calendar.getInstance();
c1.set(1999, 0, 10); // 10 Jan 1999
Map objectMap = new HashMap(1);
Map propertyMap = new HashMap(1);
propertyMap.put("GEBDAT", c1.getTime());
objectMap.put("EV_VI_ANT1", propertyMap);
assertEquals("N", testCompiledSimple("new org.mvel.tests.core.res.PDFFieldUtil().calculateAge(EV_VI_ANT1.GEBDAT) >= 25 ? 'Y' : 'N'"
, null, objectMap));
}
/**
* Provided by: Alex Roytman
*/
public void testMethodResolutionWithNullParameter() {
Context ctx = new Context();
ctx.setBean(new Bean());
Map<String, Object> vars = new HashMap<String, Object>();
System.out.println("bean.today: " + eval("bean.today", ctx, vars));
System.out.println("formatDate(bean.today): " + eval("formatDate(bean.today)", ctx, vars));
//calling method with string param with null parameter works
System.out.println("formatString(bean.nullString): " + eval("formatString(bean.nullString)", ctx, vars));
System.out.println("bean.myDate = bean.nullDate: " + eval("bean.myDate = bean.nullDate; return bean.nullDate;", ctx, vars));
//calling method with Date param with null parameter fails
System.out.println("formatDate(bean.myDate): " + eval("formatDate(bean.myDate)", ctx, vars));
//same here
System.out.println(eval("formatDate(bean.nullDate)", ctx, vars));
}
/**
* Provided by: Phillipe Ombredanne
*/
public void testCompileParserContextShouldNotLoopIndefinitelyOnValidJavaExpression() {
String expr = " System.out.println( message );\n" + //
"m.setMessage( \"Goodbye cruel world\" );\n" + //
"System.out.println(m.getStatus());\n" + //
"m.setStatus( Message.GOODBYE );\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(false);
context.addImport("Message", Message.class);
context.addInput("System", void.class);
context.addInput("message", Object.class);
context.addInput("m", Object.class);
compiler.compile(context);
}
public void testStaticNested() {
assertEquals(1, eval("org.mvel.tests.core.AbstractTest$Message.GOODBYE", new HashMap()));
}
public void testStaticNestedWithImport() {
String expr = "Message.GOODBYE;\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(false);
context.addImport("Message", Message.class);
Serializable compiledExpression = compiler.compile(context);
assertEquals(1, MVEL.executeExpression(compiledExpression));
}
public void testStaticNestedWithMethodCall() {
String expr = "item = new Item( \"Some Item\"); $msg.addItem( item ); return $msg";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(false);
context.addImport("Message", Message.class);
context.addImport("Item", Item.class);
Serializable compiledExpression = compiler.compile(context);
Map vars = new HashMap();
vars.put("$msg", new Message());
Message msg = (Message) MVEL.executeExpression(compiledExpression, vars);
Item item = (Item) msg.getItems().get(0);
assertEquals("Some Item", item.getName());
}
public void testSequentialAccessorsThenMethodCall() {
String expr = "System.out.println(drools.workingMemory); drools.workingMemory.ruleBase.removeRule(\"org.drools.examples\", \"some rule\"); ";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(true);
context.addInput("drools", KnowledgeHelper.class);
RuleBase ruleBase = new RuleBaseImpl();
WorkingMemory wm = new WorkingMemoryImpl(ruleBase);
KnowledgeHelper drools = new DefaultKnowledgeHelper(wm);
Serializable compiledExpression = compiler.compile(context);
Map vars = new HashMap();
vars.put("drools", drools);
MVEL.executeExpression(compiledExpression, vars);
}
/**
* Provided by: Aadi Deshpande
*/
public void testPropertyVerifierShouldNotLoopIndefinitely() {
String expr = "\t\tmodel.latestHeadlines = $list;\n" +
"model.latestHeadlines.add( 0, (model.latestHeadlines[2]) );";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
compiler.setVerifying(true);
ParserContext pCtx = new ParserContext();
pCtx.addInput("$list", List.class);
pCtx.addInput("model", Model.class);
compiler.compile(pCtx);
}
public void testCompileWithNewInsideMethodCall() {
String expr = " p.name = \"goober\";\n" +
" System.out.println(p.name);\n" +
" drools.insert(new Address(\"Latona\"));\n";
ExpressionCompiler compiler = new ExpressionCompiler(expr);
ParserContext context = new ParserContext();
context.setStrictTypeEnforcement(false);
context.addImport("Person", Person.class);
context.addImport("Address", Address.class);
context.addInput("p", Person.class);
context.addInput("drools", Drools.class);
compiler.compile(context);
}
/**
* Submitted by: cleverpig
*/
public void testBug4() {
ClassA A = new ClassA();
ClassB B = new ClassB();
System.out.println(MVEL.getProperty("date", A));
System.out.println(MVEL.getProperty("date", B));
}
/**
* Submitted by: Michael Neale
*/
public void testInlineCollectionParser1() {
assertEquals("q", ((Map) test("['Person.age' : [1, 2, 3, 4],'Person.rating' : 'q']")).get("Person.rating"));
assertEquals("q", ((Map) test("['Person.age' : [1, 2, 3, 4], 'Person.rating' : 'q']")).get("Person.rating"));
}
public void testIndexer() {
assertEquals("foobar", testCompiledSimple("import java.util.LinkedHashMap; LinkedHashMap map = new LinkedHashMap();" +
" map.put('a', 'foo'); map.put('b', 'bar'); s = ''; foreach (key : map.keySet()) { System.out.println(map[key]); s += map[key]; }; return s;", createTestMap()));
}
public void testLateResolveOfClass() {
ExpressionCompiler compiler = new ExpressionCompiler("System.out.println(new Foo());");
ParserContext ctx = new ParserContext();
ctx.addImport(Foo.class);
CompiledExpression s = compiler.compile(ctx);
compiler.removeParserContext();
System.out.println(MVEL.executeExpression(s));
}
public void testClassAliasing() {
assertEquals("foobar", test("Foo = String; new Foo('foobar')"));
}
public void testRandomExpression1() {
assertEquals("HelloWorld", test("if ((x15 = foo.bar) == foo.bar && x15 == foo.bar) { return 'HelloWorld'; } else { return 'GoodbyeWorld' } "));
}
public void testRandomExpression2() {
assertEquals(11, test("counterX = 0; foreach (item:{1,2,3,4,5,6,7,8,9,10}) { counterX++; }; return counterX + 1;"));
}
public void testRandomExpression3() {
assertEquals(0, test("counterX = 10; foreach (item:{1,1,1,1,1,1,1,1,1,1}) { counterX -= item; } return counterX;"));
}
public void testRandomExpression4() {
assertEquals(true, test("result = org.mvel.MVEL.eval('10 * 3'); result == (10 * 3);"));
}
public void testRandomExpression5() {
assertEquals(true, test("FooClassRef = foo.getClass(); fooInst = new FooClassRef(); name = org.mvel.MVEL.eval('name', fooInst); return name == 'dog'"));
}
public void testRandomExpression6() {
assertEquals(500, test("exprString = '250' + ' ' + '*' + ' ' + '2'; compiledExpr = org.mvel.MVEL.compileExpression(exprString);" +
" return org.mvel.MVEL.executeExpression(compiledExpr);"));
}
public void testRandomExpression7() {
assertEquals("FOOBAR", test("'foobar'.toUpperCase();"));
}
public void testRandomExpression8() {
assertEquals(true, test("'someString'.intern(); 'someString'.hashCode() == 'someString'.hashCode();"));
}
public void testRandomExpression9() {
assertEquals(false, test("_abc = 'someString'.hashCode(); _xyz = _abc + 1; _abc == _xyz"));
}
public void testRandomExpression10() {
assertEquals(false, test("(_abc = (_xyz = 'someString'.hashCode()) + 1); _abc == _xyz"));
}
/**
* Submitted by: Guerry Semones
*/
private Map<Object, Object> outerMap;
private Map<Object, Object> innerMap;
public void testAddIntToMapWithMapSyntax() throws Throwable {
outerMap = new HashMap<Object, Object>();
innerMap = new HashMap<Object, Object>();
outerMap.put("innerMap", innerMap);
// fails because mvel checks for 'foo' in the outerMap,
// rather than inside innerMap in outerMap
PropertyAccessor.set(outerMap, "innerMap['foo']", 42);
// mvel set it here
// assertEquals(42, outerMap.get("foo"));
// instead of here
assertEquals(42, innerMap.get("foo"));
}
public void testUpdateIntInMapWithMapSyntax() throws Throwable {
outerMap = new HashMap<Object, Object>();
innerMap = new HashMap<Object, Object>();
outerMap.put("innerMap", innerMap);
// fails because mvel checks for 'foo' in the outerMap,
// rather than inside innerMap in outerMap
innerMap.put("foo", 21);
PropertyAccessor.set(outerMap, "innerMap['foo']", 42);
// instead of updating it here
assertEquals(42, innerMap.get("foo"));
}
private HashMap<String, Object> context = new HashMap<String, Object>();
public void before() {
HashMap<String, Object> map = new HashMap<String, Object>();
MyBean bean = new MyBean();
bean.setVar(4);
map.put("bean", bean);
context.put("map", map);
}
public void testDeepProperty() {
before();
Serializable compiled = MVEL.compileExpression("map.bean.var");
Object obj = MVEL.executeExpression(compiled, context);
assertEquals(4, obj);
}
public void testDeepProperty2() {
before();
Serializable compiled = MVEL.compileExpression("map.bean.getVar()");
Object obj = MVEL.executeExpression(compiled, context);
assertEquals(4, obj);
}
public class MyBean {
int var;
public int getVar() {
return var;
}
public void setVar(int var) {
this.var = var;
}
}
public static class TargetClass {
private short _targetValue = 5;
public short getTargetValue() {
return _targetValue;
}
}
public void testNestedMethodCall() {
List elements = new ArrayList();
elements.add(new TargetClass());
Map variableMap = new HashMap();
variableMap.put("elements", elements);
eval(
"results = new java.util.ArrayList(); foreach (element : elements) { if( {5} contains element.targetValue.intValue()) { results.add(element); } }; results",
variableMap);
}
public void testBooleanEvaluation() {
assertEquals(true, test("true||false||false"));
}
public void testBooleanEvaluation2() {
assertEquals(true, test("equalityCheck(1,1)||fun||ackbar"));
}
/**
* Submitted by: Dimitar Dimitrov
*/
public void testFailing() {
Map<String, Object> map = new HashMap<String, Object>();
map.put("os", "windows");
assertTrue((Boolean) eval("os ~= 'windows|unix'", map));
}
public void testSuccess() {
Map<String, Object> map = new HashMap<String, Object>();
map.put("os", "windows");
assertTrue((Boolean) eval("'windows' ~= 'windows|unix'", map));
assertFalse((Boolean) eval("time ~= 'windows|unix'", new java.util.Date()));
}
public void testBooleanStrAppend() {
assertEquals("footrue", test("\"foo\" + true"));
}
public void testStringAppend() {
assertEquals("catbar", test("c + 'bar'"));
}
public void testConvertableTo() {
assertEquals(true, test("pi convertable_to Integer"));
}
public void testAssignPlus() {
assertEquals(10, test("xx0 = 5; xx0 += 4; xx0 + 1"));
}
public void testAssignPlus2() {
assertEquals(10, test("xx0 = 5; xx0 =+ 4; xx0 + 1"));
}
public void testAssignDiv() {
assertEquals(2, test("xx0 = 20; xx0 /= 10; xx0"));
}
public void testAssignMult() {
assertEquals(36, test("xx0 = 6; xx0 *= 6; xx0"));
}
public void testAssignSub() {
assertEquals(11, test("xx0 = 15; xx0 -= 4; xx0"));
}
public void testAssignSub2() {
assertEquals(-95, test("xx0 = 5; xx0 =- 100"));
}
public void testStaticWithExplicitParam() {
PojoStatic pojo = new PojoStatic("10");
eval("org.mvel.tests.core.res.AStatic.Process('10')", pojo, new HashMap());
}
public void testSimpleExpression() {
PojoStatic pojo = new PojoStatic("10");
eval("value!= null", pojo, new HashMap());
}
public void testStaticWithExpressionParam() {
PojoStatic pojo = new PojoStatic("10");
assertEquals("java.lang.String", eval("org.mvel.tests.core.res.AStatic.Process(value.getClass().getName().toString())", pojo));
}
public void testStringIndex() {
assertEquals(true, test("a = 'foobar'; a[4] == 'a'"));
}
public void testArrayConstructionSupport1() {
assertTrue(test("new String[5]") instanceof String[]);
}
public void testArrayConstructionSupport2() {
assertTrue((Boolean) test("xStr = new String[5]; xStr.size() == 5"));
}
public void testArrayConstructionSupport3() {
assertEquals("foo", test("xStr = new String[5][5]; xStr[4][0] = 'foo'; xStr[4][0]"));
}
public void testArrayConstructionSupport4() {
assertEquals(10, test("xStr = new String[5][10]; xStr[4][0] = 'foo'; xStr[4].length"));
}
public void testMath14() {
assertEquals(10 - 5 * 2 + 5 * 8 - 4, test("10-5*2 + 5*8-4"));
}
public void testMath15() {
String ex = "100-500*200 + 500*800-400";
// System.out.println("Expression: " + ex);
assertEquals(100 - 500 * 200 + 500 * 800 - 400, test(ex));
}
public void testMath16() {
String ex = "100-500*200*150 + 500*800-400";
assertEquals(100 - 500 * 200 * 150 + 500 * 800 - 400, test(ex));
}
public void testMath17() {
String ex = "(100 * 50) * 20 / 30 * 2";
// System.out.println("Expression: " + ex);
assertEquals((100d * 50d) * 20d / 30d * 2d, test(ex));
}
public void testMath18() {
String ex = "a = 100; b = 50; c = 20; d = 30; e = 2; (a * b) * c / d * e";
System.out.println("Expression: " + ex);
assertEquals((100d * 50d) * 20d / 30d * 2d, testCompiledSimple(ex, new HashMap()));
}
public void testMath19() {
String ex = "a = 100; b = 500; c = 200; d = 150; e = 500; f = 800; g = 400; a-b*c*d + e*f-g";
System.out.println("Expression: " + ex);
assertEquals(100 - 500 * 200 * 150 + 500 * 800 - 400, testCompiledSimple(ex, new HashMap()));
}
public void testMath32() {
String ex = "x = 20; y = 10; z = 5; x-y-z";
System.out.println("Expression: " + ex);
assertEquals(20 - 10 - 5, testCompiledSimple(ex, new HashMap()));
}
public void testMath33() {
String ex = "x = 20; y = 2; z = 2; x/y/z";
System.out.println("Expression: " + ex);
assertEquals(20 / 2 / 2, testCompiledSimple(ex, new HashMap()));
}
public void testMath20() {
String ex = "10-5*7-3*8-6";
System.out.println("Expression: " + ex);
assertEquals(10 - 5 * 7 - 3 * 8 - 6, test(ex));
}
public void testMath21() {
String expression = "100-50*70-30*80-60";
System.out.println("Expression: " + expression);
assertEquals(100 - 50 * 70 - 30 * 80 - 60, test(expression));
}
public void testMath22() {
String expression = "(100-50)*70-30*(20-9)**3";
System.out.println("Expression: " + expression);
assertEquals((int) ((100 - 50) * 70 - 30 * Math.pow(20 - 9, 3)), test(expression));
}
public void testMath22b() {
String expression = "a = 100; b = 50; c = 70; d = 30; e = 20; f = 9; g = 3; (a-b)*c-d*(e-f)**g";
System.out.println("Expression: " + expression);
assertEquals((int) ((100 - 50) * 70 - 30 * Math.pow(20 - 9, 3)), testCompiledSimple(expression, new HashMap()));
}
public void testMath23() {
String expression = "10 ** (3)*10**3";
System.out.println("Expression: " + expression);
assertEquals((int) (Math.pow(10, 3) * Math.pow(10, 3)), test(expression));
}
public void testMath24() {
String expression = "51 * 52 * 33 / 24 / 15 + 45 * 66 * 47 * 28 + 19";
double val = 51d * 52d * 33d / 24d / 15d + 45d * 66d * 47d * 28d + 19d;
System.out.println("Expression: " + expression);
System.out.println("Expected Result: " + val);
assertEquals(val, test(expression));
}
public void testMath25() {
String expression = "51 * (4 - 100 * 5) + 10 + 5 * 2 / 1 + 0 + 0 - 80";
int val = 51 * (4 - 100 * 5) + 10 + 5 * 2 / 1 + 0 + 0 - 80;
System.out.println("Expression: " + expression);
System.out.println("Expected Result: " + val);
assertEquals(val, test(expression));
}
public void testMath26() {
String expression = "5 + 3 * 8 * 2 ** 2";
int val = (int) (5d + 3d * 8d * Math.pow(2, 2));
System.out.println("Expression: " + expression);
System.out.println("Expected Result: " + val);
Object result = test(expression);
assertEquals(val, result);
}
public void testMath27() {
String expression = "50 + 30 * 80 * 20 ** 3 * 51";
double val = 50 + 30 * 80 * Math.pow(20, 3) * 51;
System.out.println("Expression: " + expression);
System.out.println("Expected Result: " + val);
Object result = test(expression);
assertEquals((int) val, result);
}
public void testMath28() {
String expression = "50 + 30 + 80 + 11 ** 2 ** 2 * 51";
double val = 50 + 30 + 80 + Math.pow(Math.pow(11, 2), 2) * 51;
Object result = test(expression);
assertEquals((int) val, result);
}
public void testMath29() {
String expression = "10 + 20 / 4 / 4";
System.out.println("Expression: " + expression);
double val = 10d + 20d / 4d / 4d;
assertEquals(val, MVEL.eval(expression));
}
public void testMath30() {
String expression = "40 / 20 + 10 + 6 / 2";
float val = 40f / 20f + 10f + 6f / 2f;
assertEquals((int) val, MVEL.eval(expression));
}
public void testMath31() {
String expression = "40 / 20 + 5 - 4 + 8 / 2 * 2 * 6 ** 2 + 6 - 8";
double val = 40f / 20f + 5f - 4f + 8f / 2f * 2f * Math.pow(6, 2) + 6f - 8f;
assertEquals((int) val, MVEL.eval(expression));
}
public void testMath34() {
String expression = "a+b-c*d*x/y-z+10";
Map map = new HashMap();
map.put("a", 200);
map.put("b", 100);
map.put("c", 150);
map.put("d", 2);
map.put("x", 400);
map.put("y", 300);
map.put("z", 75);
Serializable s = MVEL.compileExpression(expression);
assertEquals(200 + 100 - 150 * 2 * 400 / 300 - 75 + 10, MVEL.executeExpression(s, map));
}
public void testMath34_Interpreted() {
String expression = "a+b-c*x/y-z";
Map map = new HashMap();
map.put("a", 200);
map.put("b", 100);
map.put("c", 150);
map.put("x", 400);
map.put("y", 300);
map.put("z", 75);
assertEquals(200 + 100 - 150 * 400 / 300 - 75, MVEL.eval(expression, map));
}
public void testMath35() {
String expression = "b/x/b/b*y+a";
Map map = new HashMap();
map.put("a", 10);
map.put("b", 20);
map.put("c", 30);
map.put("x", 40);
map.put("y", 50);
map.put("z", 60);
Serializable s = MVEL.compileExpression(expression);
assertNumEquals(20d / 40d / 20d / 20d * 50d + 10d, MVEL.executeExpression(s, map));
}
public void testMath35_Interpreted() {
String expression = "b/x/b/b*y+a";
Map map = new HashMap();
map.put("a", 10);
map.put("b", 20);
map.put("c", 30);
map.put("x", 40);
map.put("y", 50);
map.put("z", 60);
assertNumEquals(20d / 40d / 20d / 20d * 50d + 10d, MVEL.eval(expression, map));
}
public void testMath36() {
String expression = "b/x*z/a+x-b+x-b/z+y";
Map map = new HashMap();
map.put("a", 10);
map.put("b", 20);
map.put("c", 30);
map.put("x", 40);
map.put("y", 50);
map.put("z", 60);
Serializable s = MVEL.compileExpression(expression);
assertNumEquals(20d / 40d * 60d / 10d + 40d - 20d + 40d - 20d / 60d + 50d, MVEL.executeExpression(s, map));
}
public void testMath37() {
String expression = "x+a*a*c/x*b*z+x/y-b";
Map map = new HashMap();
map.put("a", 10);
map.put("b", 20);
map.put("c", 30);
map.put("x", 2);
map.put("y", 2);
map.put("z", 60);
Serializable s = MVEL.compileExpression(expression);
assertNumEquals(2d + 10d * 10d * 30d / 2d * 20d * 60d + 2d / 2d - 20d, MVEL.executeExpression(s, map));
}
public void testNullSafe() {
Foo foo = new Foo();
foo.setBar(null);
Map map = new HashMap();
map.put("foo", foo);
String expression = "foo.?bar.name == null";
Serializable compiled = MVEL.compileExpression(expression);
OptimizerFactory.setDefaultOptimizer("ASM");
assertEquals(true, executeExpression(compiled, map));
assertEquals(true, executeExpression(compiled, map)); // execute a second time (to search for optimizer problems)
OptimizerFactory.setDefaultOptimizer("reflective");
assertEquals(true, executeExpression(compiled, map));
assertEquals(true, executeExpression(compiled, map)); // execute a second time (to search for optimizer problems)
assertEquals(true, eval(expression, map));
}
/**
* MVEL-57 (Submitted by: Rognvald Eaversen) -- Slightly modified by cbrock to include a positive testcase.
*/
public void testMethodInvocationWithCollectionElement() {
context = new HashMap();
context.put("pojo", new POJO());
context.put("number", "1192800637980");
Object result = MVEL.eval("pojo.function(pojo.dates[0].time)", context);
assertEquals(String.valueOf(((POJO) context.get("pojo")).getDates().iterator().next().getTime()), result);
}
public void testNestedWithInList() {
Recipient recipient1 = new Recipient();
recipient1.setName("userName1");
recipient1.setEmail("[email protected]");
Recipient recipient2 = new Recipient();
recipient2.setName("userName2");
recipient2.setEmail("[email protected]");
List list = new ArrayList();
list.add(recipient1);
list.add(recipient2);
String text =
"array = [" +
"(with ( new Recipient() ) {name = 'userName1', email = '[email protected]' })," +
"(with ( new Recipient() ) {name = 'userName2', email = '[email protected]' })];\n";
ParserContext context = new ParserContext();
context.addImport(Recipient.class);
ExpressionCompiler compiler = new ExpressionCompiler(text);
Serializable execution = compiler.compile(context);
List result = (List) MVEL.executeExpression(execution);
assertEquals(list, result);
}
// public void testNestedWithInMethod() {
// Recipient recipient1 = new Recipient();
// recipient1.setName("userName1");
// recipient1.setEmail("[email protected]");
//
// Recipients recipients = new Recipients();
// recipients.addRecipient(recipient1);
//
// String text =
// "recipients = new Recipients();\n" +
// "recipients.addRecipient( (with ( new Recipient() ) {name = 'userName1', email = '[email protected]' }) );\n" +
// "return recipients;\n";
//
// ParserContext context;
// context = new ParserContext();
// context.addImport(Recipient.class);
// context.addImport(Recipients.class);
//
// ExpressionCompiler compiler = new ExpressionCompiler(text);
// Serializable execution = compiler.compile(context);
// Recipients result = (Recipients) MVEL.executeExpression(execution);
// assertEquals(recipients, result);
// }
//
// public void testNestedWithInComplexGraph() {
// Recipients recipients = new Recipients();
//
// Recipient recipient1 = new Recipient();
// recipient1.setName("user1");
// recipient1.setEmail("[email protected]");
// recipients.addRecipient(recipient1);
//
// Recipient recipient2 = new Recipient();
// recipient2.setName("user2");
// recipient2.setEmail("[email protected]");
// recipients.addRecipient(recipient2);
//
// EmailMessage msg = new EmailMessage();
// msg.setRecipients(recipients);
// msg.setFrom("[email protected]");
//
// String text = "(with ( new EmailMessage() ) { recipients = (with (new Recipients()) { recipients = [(with ( new Recipient() ) {name = 'user1', email = '[email protected]'}), (with ( new Recipient() ) {name = 'user2', email = '[email protected]'}) ] }), " +
// " from = '[email protected]' } )";
// ParserContext context;
// context = new ParserContext();
// context.addImport(Recipient.class);
// context.addImport(Recipients.class);
// context.addImport(EmailMessage.class);
//
// ExpressionCompiler compiler = new ExpressionCompiler(text);
// Serializable execution = compiler.compile(context);
// EmailMessage result = (EmailMessage) MVEL.executeExpression(execution);
// assertEquals(msg, result);
// }
//
// public void testNestedWithInComplexGraph2() {
// Recipients recipients = new Recipients();
//
// Recipient recipient1 = new Recipient();
// recipient1.setName("user1");
// recipient1.setEmail("[email protected]");
// recipients.addRecipient(recipient1);
//
// Recipient recipient2 = new Recipient();
// recipient2.setName("user2");
// recipient2.setEmail("[email protected]");
// recipients.addRecipient(recipient2);
//
// EmailMessage msg = new EmailMessage();
// msg.setRecipients(recipients);
// msg.setFrom("[email protected]");
//
// String text = "";
// text += "with( new EmailMessage() ) { ";
// text += " recipients = with( new Recipients() ){ ";
// text += " recipients = [ with( new Recipient() ) { name = 'user1', email = '[email protected]' }, ";
// text += " with( new Recipient() ) { name = 'user2', email = '[email protected]' } ] ";
// text += " }, ";
// text += " from = '[email protected]' }";
// ParserContext context;
// context = new ParserContext();
// context.addImport(Recipient.class);
// context.addImport(Recipients.class);
// context.addImport(EmailMessage.class);
//
// ExpressionCompiler compiler = new ExpressionCompiler(text);
// Serializable execution = compiler.compile(context);
// EmailMessage result = (EmailMessage) MVEL.executeExpression(execution);
// assertEquals(msg, result);
// }
public void testNestedWithInComplexGraph3() {
Recipients recipients = new Recipients();
Recipient recipient1 = new Recipient();
recipient1.setName("user1");
recipient1.setEmail("[email protected]");
recipients.addRecipient(recipient1);
Recipient recipient2 = new Recipient();
recipient2.setName("user2");
recipient2.setEmail("[email protected]");
recipients.addRecipient(recipient2);
EmailMessage msg = new EmailMessage();
msg.setRecipients(recipients);
msg.setFrom("[email protected]");
String text = "";
text += "new EmailMessage().{ ";
text += " recipients = new Recipients().{ ";
text += " recipients = [ new Recipient().{ name = 'user1', email = '[email protected]' }, ";
text += " new Recipient().{ name = 'user2', email = '[email protected]' } ] ";
text += " }, ";
text += " from = '[email protected]' }";
ParserContext context;
context = new ParserContext();
context.addImport(Recipient.class);
context.addImport(Recipients.class);
context.addImport(EmailMessage.class);
OptimizerFactory.setDefaultOptimizer("ASM");
ExpressionCompiler compiler = new ExpressionCompiler(text);
Serializable execution = compiler.compile(context);
assertEquals(msg, MVEL.executeExpression(execution));
assertEquals(msg, MVEL.executeExpression(execution));
assertEquals(msg, MVEL.executeExpression(execution));
OptimizerFactory.setDefaultOptimizer("reflective");
context = new ParserContext(context.getParserConfiguration());
compiler = new ExpressionCompiler(text);
execution = compiler.compile(context);
assertEquals(msg, MVEL.executeExpression(execution));
assertEquals(msg, MVEL.executeExpression(execution));
assertEquals(msg, MVEL.executeExpression(execution));
}
public static class Recipient {
private String name;
private String email;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((email == null) ? 0 : email.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final Recipient other = (Recipient) obj;
if (email == null) {
if (other.email != null) return false;
}
else if (!email.equals(other.email)) return false;
if (name == null) {
if (other.name != null) return false;
}
else if (!name.equals(other.name)) return false;
return true;
}
}
public static class Recipients {
private List<Recipient> list = Collections.EMPTY_LIST;
public void setRecipients(List<Recipient> recipients) {
this.list = recipients;
}
public boolean addRecipient(Recipient recipient) {
if (list == Collections.EMPTY_LIST) {
this.list = new ArrayList<Recipient>();
}
if (!this.list.contains(recipient)) {
this.list.add(recipient);
return true;
}
return false;
}
public boolean removeRecipient(Recipient recipient) {
return this.list.remove(recipient);
}
public List<Recipient> getRecipients() {
return this.list;
}
public Recipient[] toArray() {
return list.toArray(new Recipient[list.size()]);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((list == null) ? 0 : list.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final Recipients other = (Recipients) obj;
if (list == null) {
if (other.list != null) return false;
}
return list.equals(other.list);
}
}
public static class EmailMessage {
private Recipients recipients;
private String from;
public EmailMessage() {
}
public Recipients getRecipients() {
return recipients;
}
public void setRecipients(Recipients recipients) {
this.recipients = recipients;
}
public String getFrom() {
return from;
}
public void setFrom(String from) {
this.from = from;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((from == null) ? 0 : from.hashCode());
result = prime * result + ((recipients == null) ? 0 : recipients.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
final EmailMessage other = (EmailMessage) obj;
if (from == null) {
if (other.from != null) return false;
}
else if (!from.equals(other.from)) return false;
if (recipients == null) {
if (other.recipients != null) return false;
}
else if (!recipients.equals(other.recipients)) return false;
return true;
}
}
public class POJO {
private Set<Date> dates = new HashSet<Date>();
public POJO() {
dates.add(new Date());
}
public Set<Date> getDates() {
return dates;
}
public void setDates(Set<Date> dates) {
this.dates = dates;
}
public String function(long num) {
return String.valueOf(num);
}
}
public void testSubEvaluation() {
HashMap<String, Object> map = new HashMap<String, Object>();
map.put("EV_BER_BER_NR", "12345");
map.put("EV_BER_BER_PRIV", Boolean.FALSE);
assertEquals("12345", testCompiledSimple("EV_BER_BER_NR + ((EV_BER_BER_PRIV != empty && EV_BER_BER_PRIV == true) ? \"/PRIVAT\" : '')", null, map));
map.put("EV_BER_BER_PRIV", Boolean.TRUE);
assertEquals("12345/PRIVAT", testCompiledSimple("EV_BER_BER_NR + ((EV_BER_BER_PRIV != empty && EV_BER_BER_PRIV == true) ? \"/PRIVAT\" : '')", null, map));
}
public void testNestedMethod1() {
Vector vectorA = new Vector();
Vector vectorB = new Vector();
vectorA.add("Foo");
Map map = new HashMap();
map.put("vecA", vectorA);
map.put("vecB", vectorB);
testCompiledSimple("vecB.add(vecA.remove(0)); vecA.add('Foo');", null, map);
assertEquals("Foo", vectorB.get(0));
}
public void testNegativeArraySizeBug() throws Exception {
String expressionString1 = "results = new java.util.ArrayList(); foreach (element : elements) { if( ( {30, 214, 158, 31, 95, 223, 213, 86, 159, 34, 32, 96, 224, 160, 85, 201, 29, 157, 100, 146, 82, 203, 194, 145, 140, 81, 27, 166, 212, 38, 28, 94, 168, 23, 87, 150, 35, 149, 193, 33, 132, 206, 93, 196, 24, 88, 195, 36, 26, 154, 167, 108, 204, 74, 46, 25, 153, 202, 79, 207, 143, 43, 16, 80, 198, 208, 144, 41, 97, 142, 83, 18, 162, 103, 155, 98, 44, 17, 205, 77, 156, 141, 165, 102, 84, 37, 101, 222, 40, 104, 99, 177, 182, 22, 180, 21, 137, 221, 179, 78, 42, 178, 19, 183, 139, 218, 219, 39, 220, 20, 184, 217, 138, 62, 190, 171, 123, 113, 59, 118, 225, 124, 169, 60, 117, 1} contains element.attribute ) ) { results.add(element); } }; results";
String expressionString2 = "results = new java.util.ArrayList(); foreach (element : elements) { if( ( {30, 214, 158, 31, 95, 223, 213, 86, 159, 34, 32, 96, 224, 160, 85, 201, 29, 157, 100, 146, 82, 203, 194, 145, 140, 81, 27, 166, 212, 38, 28, 94, 168, 23, 87, 150, 35, 149, 193, 33, 132, 206, 93, 196, 24, 88, 195, 36, 26, 154, 167, 108, 204, 74, 46, 25, 153, 202, 79, 207, 143, 43, 16, 80, 198, 208, 144, 41, 97, 142, 83, 18, 162, 103, 155, 98, 44, 17, 205, 77, 156, 141, 165, 102, 84, 37, 101, 222, 40, 104, 99, 177, 182, 22, 180, 21, 137, 221, 179, 78, 42, 178, 19, 183, 139, 218, 219, 39, 220, 20, 184, 217, 138, 62, 190, 171, 123, 113, 59, 118, 225, 124, 169, 60, 117, 1, 61, 189, 122, 68, 58, 119, 63, 226, 3, 172} contains element.attribute ) ) { results.add(element); } }; results";
List<Target> targets = new ArrayList<Target>();
targets.add(new Target(1));
targets.add(new Target(999));
Map vars = new HashMap();
vars.put("elements", targets);
assertEquals(1, ((List) testCompiledSimple(expressionString1, null, vars)).size());
assertEquals(1, ((List) testCompiledSimple(expressionString2, null, vars)).size());
}
public static final class Target {
private int _attribute;
public Target(int attribute_) {
_attribute = attribute_;
}
public int getAttribute() {
return _attribute;
}
}
public void testFunctionDefAndCall() {
assertEquals("FoobarFoobar",
test("function heyFoo() { return 'Foobar'; };\n" +
"return heyFoo() + heyFoo();"));
}
public void testFunctionDefAndCall2() {
ExpressionCompiler compiler = new ExpressionCompiler("function heyFoo() { return 'Foobar'; };\n" +
"return heyFoo() + heyFoo();");
Serializable s = compiler.compile();
Map<String, Function> m = CompilerTools.extractAllDeclaredFunctions((CompiledExpression) s);
assertTrue(m.containsKey("heyFoo"));
OptimizerFactory.setDefaultOptimizer("reflective");
assertEquals("FoobarFoobar", MVEL.executeExpression(s, new HashMap()));
assertEquals("FoobarFoobar", MVEL.executeExpression(s, new HashMap()));
OptimizerFactory.setDefaultOptimizer("dynamic");
}
public void testFunctionDefAndCall3() {
assertEquals("FOOBAR", test("function testFunction() { a = 'foo'; b = 'bar'; a + b; }; testFunction().toUpperCase(); "));
}
public void testFunctionDefAndCall4() {
assertEquals("barfoo", test("function testFunction(input) { return input; }; testFunction('barfoo');"));
}
public void testFunctionDefAndCall5() {
assertEquals(10, test("function testFunction(x, y) { return x + y; }; testFunction(7, 3);"));
}
public void testFunctionDefAndCall6() {
assertEquals("foo", MVEL.eval("def fooFunction(x) x; fooFunction('foo')", new HashMap()));
}
public void testDynamicImports2() {
assertEquals(BufferedReader.class, test("import java.io.*; BufferedReader"));
}
public void testStringWithTernaryIf() {
test("System.out.print(\"Hello : \" + (foo != null ? \"FOO!\" : \"NO FOO\") + \". Bye.\");");
}
public void testFunctionsScript1() throws IOException {
MVEL.evalFile(new File("samples/scripts/functions1.mvel"));
}
public void testQuickSortScript1() throws IOException {
MVEL.evalFile(new File("samples/scripts/quicksort.mvel"));
}
public void testQuickSortScript2() throws IOException {
Object[] sorted = (Object[]) test(new String(loadFromFile(new File("samples/scripts/quicksort.mvel"))));
int last = -1;
for (Object o : sorted) {
if (last == -1) {
last = (Integer) o;
}
else {
assertTrue(((Integer) o) > last);
last = (Integer) o;
}
}
}
public void testQuickSortScript3() throws IOException {
Object[] sorted = (Object[]) test(new String(loadFromFile(new File("samples/scripts/quicksort2.mvel"))));
int last = -1;
for (Object o : sorted) {
if (last == -1) {
last = (Integer) o;
}
else {
assertTrue(((Integer) o) > last);
last = (Integer) o;
}
}
}
public void testMultiLineString() throws IOException {
MVEL.evalFile(new File("samples/scripts/multilinestring.mvel"));
}
public void testCompactIfElse() {
assertEquals("foo", test("if (false) 'bar'; else 'foo';"));
}
public void testAndOpLiteral() {
assertEquals(true, test("true && true"));
}
public void testAnonymousFunctionDecl() {
assertEquals(3, test("anonFunc = function (a,b) { return a + b; }; anonFunc(1,2)"));
}
public void testFunctionSemantics() {
assertEquals(true, test("function fooFunction(a) { return a; }; x__0 = ''; 'boob' == fooFunction(x__0 = 'boob') && x__0 == 'boob';"));
}
public void testUseOfVarKeyword() {
assertEquals("FOO_BAR", test("var barfoo = 'FOO_BAR'; return barfoo;"));
}
public void testAssignment5() {
assertEquals(15, test("x = (10) + (5); x"));
}
public void testSetExpressions1() {
Map<String, Object> myMap = new HashMap<String, Object>();
final Serializable fooExpr = MVEL.compileSetExpression("foo");
MVEL.executeSetExpression(fooExpr, myMap, "blah");
assertEquals("blah", myMap.get("foo"));
MVEL.executeSetExpression(fooExpr, myMap, "baz");
assertEquals("baz", myMap.get("foo"));
}
public void testInlineCollectionNestedObjectCreation() {
Map m = (Map) test("['Person.age' : [1, 2, 3, 4], 'Person.rating' : ['High', 'Low']," +
" 'Person.something' : (new String('foo').toUpperCase())]");
assertEquals("FOO", m.get("Person.something"));
}
public void testInlineCollectionNestedObjectCreation1() {
Map m = (Map) test("[new String('foo') : new String('bar')]");
assertEquals("bar", m.get("foo"));
}
public void testEgressType() {
ExpressionCompiler compiler = new ExpressionCompiler("( $cheese )");
ParserContext context = new ParserContext();
context.addInput("$cheese", Cheese.class);
ExecutableStatement expr = compiler.compile(context);
assertEquals(Cheese.class, expr.getKnownEgressType());
}
public void testDuplicateVariableDeclaration() {
ExpressionCompiler compiler = new ExpressionCompiler("String x = \"abc\"; Integer x = new Integer( 10 );");
ParserContext context = new ParserContext();
try {
compiler.compile(context);
fail("Compilation must fail with duplicate variable declaration exception.");
}
catch (CompileException ce) {
// success
}
}
public void testFullyQualifiedTypeAndCast() {
assertEquals(1, test("java.lang.Integer number = (java.lang.Integer) '1';"));
}
public void testAnonymousFunction() {
assertEquals("foobar", test("a = function { 'foobar' }; a();"));
}
public void testThreadSafetyInterpreter1() {
//First evaluation
System.out.println("First evaluation: " + MVEL.eval("true"));
new Thread(new Runnable() {
public void run() {
// Second evaluation - this succeeds only if the first evaluation is not commented out
System.out.println("Second evaluation: " + MVEL.eval("true"));
}
}).start();
}
public void testStringEquals() {
assertEquals(true, test("ipaddr == '10.1.1.2'"));
}
public void testArrayList() throws SecurityException, NoSuchMethodException {
Collection<String> collection = new ArrayList<String>();
collection.add("I CAN HAS CHEEZBURGER");
assertEquals(collection.size(), MVEL.eval("size()", collection));
}
public void testUnmodifiableCollection() throws SecurityException, NoSuchMethodException {
Collection<String> collection = new ArrayList<String>();
collection.add("I CAN HAS CHEEZBURGER");
collection = unmodifiableCollection(collection);
assertEquals(collection.size(), MVEL.eval("size()", collection));
}
public void testSingleton() throws SecurityException, NoSuchMethodException {
Collection<String> collection = Collections.singleton("I CAN HAS CHEEZBURGER");
assertEquals(collection.size(), MVEL.eval("size()", collection));
}
public void testCharComparison() {
assertEquals(true, test("'z' > 'a'"));
}
public void testCharComparison2() {
assertEquals(false, test("'z' < 'a'"));
}
public void testRegExMatch() {
assertEquals(true, MVEL.eval("$test = 'foo'; $ex = 'f.*'; $test ~= $ex", new HashMap()));
}
public static class TestClass2 {
public void addEqualAuthorizationConstraint(Foo leg, Bar ctrlClass, Integer authorization) {
}
}
public void testJIRA93() {
Map testMap = createTestMap();
testMap.put("testClass2", new TestClass2());
Serializable s = MVEL.compileExpression("testClass2.addEqualAuthorizationConstraint(foo, foo.bar, 5)");
for (int i = 0; i < 5; i++) {
MVEL.executeExpression(s, testMap);
}
}
public void testJIRA96() {
ParserContext ctx = new ParserContext();
ctx.setStrictTypeEnforcement(true);
ctx.addInput("fooString", String[].class);
ExpressionCompiler compiler = new ExpressionCompiler("fooString[0].toUpperCase()");
compiler.compile(ctx);
}
public void testStrongTyping() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
try {
new ExpressionCompiler("blah").compile(ctx);
}
catch (Exception e) {
// should fail
return;
}
assertTrue(false);
}
public void testStrongTyping2() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("blah", String.class);
try {
new ExpressionCompiler("1-blah").compile(ctx);
}
catch (Exception e) {
e.printStackTrace();
return;
}
assertTrue(false);
}
public void testStringToArrayCast() {
Object o = test("(char[]) 'abcd'");
assertTrue(o instanceof char[]);
}
public void testStringToArrayCast2() {
assertTrue((Boolean) test("_xyxy = (char[]) 'abcd'; _xyxy[0] == 'a'"));
}
public void testStaticallyTypedArrayVar() {
assertTrue((Boolean) test("char[] _c___ = new char[10]; _c___ instanceof char[]"));
}
public void testParserErrorHandling() {
final ParserContext ctx = new ParserContext();
ExpressionCompiler compiler = new ExpressionCompiler("a[");
try {
compiler.compile(ctx);
}
catch (Exception e) {
return;
}
assertTrue(false);
}
public void testJIRA99_Interpreted() {
Map map = new HashMap();
map.put("x", 20);
map.put("y", 10);
map.put("z", 5);
assertEquals(20 - 10 - 5, MVEL.eval("x - y - z", map));
}
public void testJIRA99_Compiled() {
Map map = new HashMap();
map.put("x", 20);
map.put("y", 10);
map.put("z", 5);
assertEquals(20 - 10 - 5, testCompiledSimple("x - y - z", map));
}
public void testJIRA100() {
assertEquals(20, test("java.math.BigDecimal axx = new java.math.BigDecimal( 10.0 ); java.math.BigDecimal bxx = new java.math.BigDecimal( 10.0 ); java.math.BigDecimal cxx = axx + bxx; return cxx; "));
}
public void testJIRA100a() {
assertEquals(233.23, test("java.math.BigDecimal axx = new java.math.BigDecimal( 109.45 ); java.math.BigDecimal bxx = new java.math.BigDecimal( 123.78 ); java.math.BigDecimal cxx = axx + bxx; return cxx; "));
}
public void testJIRA100b() {
String expression = "(8 / 10) * 100 <= 80;";
assertEquals((8 / 10) * 100 <= 80, testCompiledSimple(expression, new HashMap()));
}
public void testJIRA92() {
assertEquals(false, test("'stringValue' > null"));
}
public void testAssignToBean() {
Person person = new Person();
MVEL.eval("this.name = 'foo'", person);
assertEquals("foo", person.getName());
Serializable s = MVEL.compileExpression("this.name = 'bar'");
MVEL.executeExpression(s, person);
assertEquals("bar", person.getName());
}
public void testParameterizedTypeInStrictMode() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("foo", HashMap.class, new Class[]{String.class, String.class});
ExpressionCompiler compiler = new ExpressionCompiler("foo.get('bar').toUpperCase()");
compiler.compile(ctx);
}
public void testParameterizedTypeInStrictMode2() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("ctx", Object.class);
ExpressionCompiler compiler = new ExpressionCompiler("org.mvel.DataConversion.convert(ctx, String).toUpperCase()");
CompiledExpression ce = compiler.compile(ctx);
assertEquals(String.class, ce.getKnownEgressType());
}
public void testParameterizedTypeInStrictMode3() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("base", Base.class);
ExpressionCompiler compiler = new ExpressionCompiler("base.list");
CompiledExpression c = compiler.compile(ctx);
assertTrue(c.getParserContext().getLastTypeParameters()[0].equals(String.class));
}
public void testParameterizedTypeInStrictMode4() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("base", Base.class);
ExpressionCompiler compiler = new ExpressionCompiler("base.list.get(1).toUpperCase()");
CompiledExpression ce = compiler.compile(ctx);
assertEquals(String.class, ce.getKnownEgressType());
}
public void testMapAssignmentNestedExpression() {
Map map = new HashMap();
map.put("map", new HashMap());
String ex = "map[java.lang.Integer.MAX_VALUE] = 'bar'; map[java.lang.Integer.MAX_VALUE];";
Serializable s = MVEL.compileExpression(ex);
assertEquals("bar", MVEL.executeExpression(s, map));
assertEquals("bar", MVEL.eval(ex, map));
}
public void testMapAssignmentNestedExpression2() {
Map map = new HashMap();
map.put("x", "bar");
map.put("map", new HashMap());
String ex = "map[x] = 'foo'; map['bar'];";
Serializable s = MVEL.compileExpression(ex);
assertEquals("foo", MVEL.executeExpression(s, map));
assertEquals("foo", MVEL.eval(ex, map));
}
/**
* MVEL-103
*/
public static class MvelContext {
public boolean singleCalled;
public boolean arrayCalled;
public void methodForTest(String string) {
System.out.println("sigle param method called!");
singleCalled = true;
}
public void methodForTest(String[] strings) {
System.out.println("array param method called!");
arrayCalled = true;
}
}
public void testMethodResolutionOrder() {
MvelContext mvelContext = new MvelContext();
MVEL.eval("methodForTest({'1','2'})", mvelContext);
MVEL.eval("methodForTest('1')", mvelContext);
assertTrue(mvelContext.arrayCalled && mvelContext.singleCalled);
}
public void testOKQuoteComment() throws Exception {
// ' in comments outside of blocks seem OK
MVEL.compileExpression("// ' this is OK!");
MVEL.compileExpression("// ' this is OK!\n");
MVEL.compileExpression("// ' this is OK!\nif(1==1) {};");
}
public void testOKDblQuoteComment() throws Exception {
// " in comments outside of blocks seem OK
MVEL.compileExpression("// \" this is OK!");
MVEL.compileExpression("// \" this is OK!\n");
MVEL.compileExpression("// \" this is OK!\nif(1==1) {};");
}
public void testIfComment() throws Exception {
// No quote? OK!
MVEL.compileExpression("if(1 == 1) {\n" +
" // Quote & Double-quote seem to break this expression\n" +
"}");
}
public void testIfQuoteCommentBug() throws Exception {
// Comments in an if seem to fail if they contain a '
MVEL.compileExpression("if(1 == 1) {\n" +
" // ' seems to break this expression\n" +
"}");
}
public void testIfDblQuoteCommentBug() throws Exception {
// Comments in an if seem to fail if they contain a "
MVEL.compileExpression("if(1 == 1) {\n" +
" // \" seems to break this expression\n" +
"}");
}
public void testForEachQuoteCommentBug() throws Exception {
// Comments in a foreach seem to fail if they contain a '
MVEL.compileExpression("foreach ( item : 10 ) {\n" +
" // The ' character causes issues\n" +
"}");
}
public void testForEachDblQuoteCommentBug() throws Exception {
// Comments in a foreach seem to fail if they contain a "
MVEL.compileExpression("foreach ( item : 10 ) {\n" +
" // The \" character causes issues\n" +
"}");
}
public void testForEachCommentOK() throws Exception {
// No quote? OK!
MVEL.compileExpression("foreach ( item : 10 ) {\n" +
" // The quote & double quote characters cause issues\n" +
"}");
}
public void testElseIfCommentBugPreCompiled() throws Exception {
// Comments can't appear before else if() - compilation works, but evaluation fails
MVEL.executeExpression(MVEL.compileExpression("// This is never true\n" +
"if (1==0) {\n" +
" // Never reached\n" +
"}\n" +
"// This is always true...\n" +
"else if (1==1) {" +
" System.out.println('Got here!');" +
"}\n"));
}
public void testElseIfCommentBugEvaluated() throws Exception {
// Comments can't appear before else if()
MVEL.eval("// This is never true\n" +
"if (1==0) {\n" +
" // Never reached\n" +
"}\n" +
"// This is always true...\n" +
"else if (1==1) {" +
" System.out.println('Got here!');" +
"}\n");
}
public void testRegExpOK() throws Exception {
// This works OK interpreted
assertEquals(Boolean.TRUE, MVEL.eval("'Hello'.toUpperCase() ~= '[A-Z]{0,5}'"));
assertEquals(Boolean.TRUE, MVEL.eval("1 == 0 || ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')"));
// This works OK if toUpperCase() is avoided in pre-compiled
Object ser = MVEL.compileExpression("'Hello' ~= '[a-zA-Z]{0,5}'");
assertEquals(Boolean.TRUE, MVEL.executeExpression(ser));
}
public void testRegExpPreCompiledBug() throws Exception {
// If toUpperCase() is used in the expression then this fails; returns null not
// a boolean.
Object ser = MVEL.compileExpression("'Hello'.toUpperCase() ~= '[a-zA-Z]{0,5}'");
assertEquals(Boolean.TRUE, MVEL.executeExpression(ser));
}
public void testRegExpOrBug() throws Exception {
// This fails during execution due to returning null, I think...
Object ser = MVEL.compileExpression("1 == 0 || ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')");
assertEquals(Boolean.TRUE, MVEL.executeExpression(ser));
}
public void testRegExpAndBug() throws Exception {
// This also fails due to returning null, I think...
Object ser = MVEL.compileExpression("1 == 1 && ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')");
assertEquals(Boolean.TRUE, MVEL.executeExpression(ser));
}
public void testLiteralUnionWithComparison() {
Serializable ce = MVEL.compileExpression("'Foo'.toUpperCase() == 'FOO'");
assertEquals(Boolean.TRUE, MVEL.executeExpression(ce));
}
public static final List<String> STRINGS = Arrays.asList("hi", "there");
public static class A {
public List<String> getStrings() {
return STRINGS;
}
}
public final void testDetermineEgressParametricType() {
final ParserContext parserContext = new ParserContext();
parserContext.setStrongTyping(true);
parserContext.addInput("strings", List.class, new Class[]{String.class});
final CompiledExpression expr = new ExpressionCompiler("strings").compile(parserContext);
assertTrue(STRINGS.equals(MVEL.executeExpression(expr, new A())));
final Type[] typeParameters = expr.getParserContext().getLastTypeParameters();
assertTrue(typeParameters != null);
assertTrue(String.class.equals(typeParameters[0]));
}
public final void testDetermineEgressParametricType2() {
final ParserContext parserContext = new ParserContext();
parserContext.setStrongTyping(true);
parserContext.addInput("strings", List.class, new Class[]{String.class});
final CompiledExpression expr = new ExpressionCompiler("strings", parserContext)
.compile();
assertTrue(STRINGS.equals(MVEL.executeExpression(expr, new A())));
final Type[] typeParameters = expr.getParserContext().getLastTypeParameters();
assertTrue(null != typeParameters);
assertTrue(String.class.equals(typeParameters[0]));
}
public void testCustomPropertyHandler() {
PropertyHandlerFactory.registerPropertyHandler(SampleBean.class, new SampleBeanAccessor());
assertEquals("dog", test("foo.sampleBean.bar.name"));
}
public void testSetAccessorOverloadedEqualsStrictMode() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("foo", Foo.class);
try {
CompiledExpression expr = new ExpressionCompiler("foo.bar = 0").compile(ctx);
}
catch (CompileException e) {
// should fail.
e.printStackTrace();
return;
}
assertTrue(false);
}
public void testSetAccessorOverloadedEqualsStrictMode2() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("foo", Foo.class);
try {
CompiledExpression expr = new ExpressionCompiler("foo.aValue = 'bar'").compile(ctx);
}
catch (CompileException e) {
assertTrue(false);
}
}
public void testAnalysisCompile() {
CompiledExpression ce = new ExpressionCompiler("foo.aValue = 'bar'").compile();
assertTrue(ce.getParserContext().getInputs().keySet().contains("foo"));
}
public void testInlineWith() {
CompiledExpression expr = new ExpressionCompiler("foo.{name='poopy', aValue='bar'}").compile();
Foo f = (Foo) MVEL.executeExpression(expr, createTestMap());
assertEquals("poopy", f.getName());
assertEquals("bar", f.aValue);
}
public void testInlineWith2() {
CompiledExpression expr = new ExpressionCompiler("foo.{name = 'poopy', aValue = 'bar', bar.{name = 'foobie'}}").compile();
Foo f = (Foo) MVEL.executeExpression(expr, createTestMap());
assertEquals("poopy", f.getName());
assertEquals("bar", f.aValue);
assertEquals("foobie", f.getBar().getName());
}
public void testInlineWith3() {
CompiledExpression expr = new ExpressionCompiler("foo.{name = 'poopy', aValue = 'bar', bar.{name = 'foobie'}, toUC('doopy')}").compile();
Foo f = (Foo) MVEL.executeExpression(expr, createTestMap());
assertEquals("poopy", f.getName());
assertEquals("bar", f.aValue);
assertEquals("foobie", f.getBar().getName());
assertEquals("doopy", f.register);
}
public void testInlineWith4() {
OptimizerFactory.setDefaultOptimizer("ASM");
ExpressionCompiler expr = new ExpressionCompiler("new Foo().{ name = 'bar' }");
ParserContext pCtx = new ParserContext();
pCtx.addImport(Foo.class);
CompiledExpression c = expr.compile(pCtx);
Foo f = (Foo) MVEL.executeExpression(c);
assertEquals("bar", f.getName());
f = (Foo) MVEL.executeExpression(c);
assertEquals("bar", f.getName());
}
public void testInlineWithImpliedThis() {
Base b = new Base();
ExpressionCompiler expr = new ExpressionCompiler(".{ data = 'foo' }");
CompiledExpression compiled = expr.compile();
MVEL.executeExpression(compiled, b);
assertEquals(b.data, "foo");
}
public void testDataConverterStrictMode() throws Exception {
DataConversion.addConversionHandler(Date.class, new MVELDateCoercion());
ParserContext ctx = new ParserContext();
ctx.addImport("Cheese", Cheese.class);
ctx.setStrongTyping(true);
ctx.setStrictTypeEnforcement(true);
Cheese expectedCheese = new Cheese();
expectedCheese.setUseBy(new SimpleDateFormat("dd-MMM-yyyy").parse("10-Jul-1974"));
ExpressionCompiler compiler = new ExpressionCompiler("c = new Cheese(); c.useBy = '10-Jul-1974'; return c");
Serializable expr = compiler.compile(ctx);
Cheese actualCheese = (Cheese) executeExpression(expr, createTestMap());
assertEquals(expectedCheese.getUseBy(), actualCheese.getUseBy());
}
public static class MVELDateCoercion implements ConversionHandler {
public boolean canConvertFrom(Class cls) {
if (cls == String.class || cls.isAssignableFrom(Date.class)) {
return true;
}
else {
return false;
}
}
public Object convertFrom(Object o) {
try {
SimpleDateFormat sdf = new SimpleDateFormat("dd-MMM-yyyy");
if (o instanceof String) {
return sdf.parse((String) o);
}
else {
return o;
}
}
catch (Exception e) {
throw new RuntimeException("Exception was thrown", e);
}
}
}
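// Descriptive note (added for readability, not part of the original test source): the tests
// below exercise KnowledgeHelperFixer, which rewrites bare action calls such as update(x),
// insert(x), insertLogical(x), retract(x), modifyInsert(x) and modifyRetract(x) into their
// "drools."-prefixed forms, while leaving comments, already-prefixed calls and substrings of
// larger words (e.g. "xxupdate(") untouched.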
private static final KnowledgeHelperFixer fixer = new KnowledgeHelperFixer();
public void testSingleLineCommentSlash() {
String result = fixer.fix(" //System.out.println( \"help\" );\r\n System.out.println( \"help\" ); \r\n list.add( $person );");
assertEquals(" //System.out.println( \"help\" );\r\n System.out.println( \"help\" ); \r\n list.add( $person );",
result);
}
public void testSingleLineCommentHash() {
String result = fixer.fix(" #System.out.println( \"help\" );\r\n System.out.println( \"help\" ); \r\n list.add( $person );");
assertEquals(" #System.out.println( \"help\" );\r\n System.out.println( \"help\" ); \r\n list.add( $person );",
result);
}
public void testMultiLineComment() {
String result = fixer.fix(" /*System.out.println( \"help\" );\r\n*/ System.out.println( \"help\" ); \r\n list.add( $person );");
assertEquals(" /*System.out.println( \"help\" );\r\n*/ System.out.println( \"help\" ); \r\n list.add( $person );",
result);
}
public void testAdd__Handle__Simple() {
String result = fixer.fix("update(myObject );");
assertEqualsIgnoreWhitespace("drools.update(myObject );",
result);
result = fixer.fix("update ( myObject );");
assertEqualsIgnoreWhitespace("drools.update( myObject );",
result);
}
public void testAdd__Handle__withNewLines() {
final String result = fixer.fix("\n\t\n\tupdate( myObject );");
assertEqualsIgnoreWhitespace("\n\t\n\tdrools.update( myObject );",
result);
}
public void testAdd__Handle__rComplex() {
String result = fixer.fix("something update( myObject); other");
assertEqualsIgnoreWhitespace("something drools.update( myObject); other",
result);
result = fixer.fix("something update ( myObject );");
assertEqualsIgnoreWhitespace("something drools.update( myObject );",
result);
result = fixer.fix(" update( myObject ); x");
assertEqualsIgnoreWhitespace(" drools.update( myObject ); x",
result);
// should not touch, as it is not a stand-alone word
result = fixer.fix("xxupdate(myObject ) x");
assertEqualsIgnoreWhitespace("xxupdate(myObject ) x",
result);
}
public void testMultipleMatches() {
String result = fixer.fix("update(myObject); update(myObject );");
assertEqualsIgnoreWhitespace("drools.update(myObject); drools.update(myObject );",
result);
result = fixer.fix("xxx update(myObject ); update( myObject ); update( yourObject ); yyy");
assertEqualsIgnoreWhitespace("xxx drools.update(myObject ); drools.update( myObject ); drools.update( yourObject ); yyy",
result);
}
public void testAssert1() {
final String raw = "insert( foo );";
final String result = "drools.insert( foo );";
assertEqualsIgnoreWhitespace(result,
fixer.fix(raw));
}
public void testAssert2() {
final String raw = "some code; insert( new String(\"foo\") );\n More();";
final String result = "some code; drools.insert( new String(\"foo\") );\n More();";
assertEqualsIgnoreWhitespace(result,
fixer.fix(raw));
}
public void testAssertLogical() {
final String raw = "some code; insertLogical(new String(\"foo\"));\n More();";
final String result = "some code; drools.insertLogical(new String(\"foo\"));\n More();";
assertEqualsIgnoreWhitespace(result,
fixer.fix(raw));
}
public void testModifyRetractModifyInsert() {
final String raw = "some code; insert( bar ); modifyRetract( foo );\n More(); retract( bar ); modifyInsert( foo );";
final String result = "some code; drools.insert( bar ); drools.modifyRetract( foo );\n More(); drools.retract( bar ); drools.modifyInsert( foo );";
assertEqualsIgnoreWhitespace(result,
fixer.fix(raw));
}
public void testAllActionsMushedTogether() {
String result = fixer.fix("insert(myObject ); update(ourObject);\t retract(herObject);");
assertEqualsIgnoreWhitespace("drools.insert(myObject ); drools.update(ourObject);\t drools.retract(herObject);",
result);
result = fixer.fix("insert( myObject ); update(ourObject);\t retract(herObject );\ninsert( myObject ); update(ourObject);\t retract( herObject );");
assertEqualsIgnoreWhitespace("drools.insert( myObject ); drools.update(ourObject);\t drools.retract(herObject );\ndrools.insert( myObject ); drools.update(ourObject);\t drools.retract( herObject );",
result);
}
public void testLeaveLargeAlone() {
final String original = "yeah yeah yeah minsert( xxx ) this is a long() thing Person (name=='drools') modify a thing";
final String result = fixer.fix(original);
assertEqualsIgnoreWhitespace(original,
result);
}
public void testWithNull() {
final String original = null;
final String result = fixer.fix(original);
assertEqualsIgnoreWhitespace(original,
result);
}
public void testLeaveAssertAlone() {
final String original = "drools.insert(foo)";
assertEqualsIgnoreWhitespace(original,
fixer.fix(original));
}
public void testLeaveAssertLogicalAlone() {
final String original = "drools.insertLogical(foo)";
assertEqualsIgnoreWhitespace(original,
fixer.fix(original));
}
public void testWackyAssert() {
final String raw = "System.out.println($person1.getName() + \" and \" + $person2.getName() +\" are sisters\");\n" + "insert($person1.getName(\"foo\") + \" and \" + $person2.getName() +\" are sisters\"); yeah();";
final String expected = "System.out.println($person1.getName() + \" and \" + $person2.getName() +\" are sisters\");\n" + "drools.insert($person1.getName(\"foo\") + \" and \" + $person2.getName() +\" are sisters\"); yeah();";
assertEqualsIgnoreWhitespace(expected,
fixer.fix(raw));
}
public void testMoreAssertCraziness() {
final String raw = "foobar(); (insert(new String(\"blah\").get()); bangBangYudoHono();)";
assertEqualsIgnoreWhitespace("foobar(); (drools.insert(new String(\"blah\").get()); bangBangYudoHono();)",
fixer.fix(raw));
}
public void testRetract() {
final String raw = "System.out.println(\"some text\");retract(object);";
assertEqualsIgnoreWhitespace("System.out.println(\"some text\");drools.retract(object);",
fixer.fix(raw));
}
private void assertEqualsIgnoreWhitespace(final String expected,
final String actual) {
if (expected == null || actual == null) {
assertEquals(expected,
actual);
return;
}
final String cleanExpected = expected.replaceAll("\\s+",
"");
final String cleanActual = actual.replaceAll("\\s+",
"");
assertEquals(cleanExpected,
cleanActual);
}
public void testIsDefOperator() {
assertEquals(true, test("_v1 = 'bar'; isdef _v1"));
}
public void testIsDefOperator2() {
assertEquals(false, test("isdef _v1"));
}
public void testIsDefOperator3() {
assertEquals(true, test("!(isdef _v1)"));
}
public void testIsDefOperator4() {
assertEquals(true, test("! (isdef _v1)"));
}
public void testReturnType1() {
assertEquals(Double.class, new ExpressionCompiler("100.5").compile().getKnownEgressType());
}
public void testReturnType2() {
assertEquals(Integer.class, new ExpressionCompiler("1").compile().getKnownEgressType());
}
public void testStrongTyping3() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
try {
new ExpressionCompiler("foo.toUC(100.5").compile(ctx);
}
catch (Exception e) {
// should fail.
return;
}
assertTrue(false);
}
public void testDoLoop() {
assertEquals(10, test("i = 0; do { i++ } while (i != 10); i"));
}
public void testDoLoop2() {
assertEquals(50, test("i=100;do{i--}until(i==50); i"));
}
public void testForLoop() {
assertEquals("012345", test("String str = ''; for(i=0;i<6;i++) { str += i }; str"));
}
public void testForLoop2() {
assertEquals("012345", MVEL.eval("String str = ''; for(i=0;i<6;i++) { str += i }; str", new HashMap()));
}
public void testUntilLoop() {
assertEquals("012345", test("String str = ''; int i = 0; until (i == 6) { str += i++; }; str"));
}
public void testXX() {
test("foo = 100; !foo");
}
public void testEgressType1() {
assertEquals(Boolean.class, new ExpressionCompiler("foo != null").compile().getKnownEgressType());
}
public void testIncrementInBooleanStatement() {
assertEquals(true, test("hour++ < 61 && hour == 61"));
}
public void testIncrementInBooleanStatement2() {
assertEquals(true, test("++hour == 61"));
}
public void testDeepNestedLoopsInFunction() {
assertEquals(10, test("def increment(i) { i + 1 }; def ff(i) { x = 0; while (i < 1) { " +
"x++; while (i < 10) { i = increment(i); } }; if (x == 1) return i; else -1; }; i = 0; ff(i);"));
}
public void testArrayDefinitionWithInitializer() {
String[] compareTo = new String[]{"foo", "bar"};
String[] results = (String[]) test("new String[] { 'foo', 'bar' }");
for (int i = 0; i < compareTo.length; i++) {
if (!compareTo[i].equals(results[i])) throw new AssertionError("arrays do not match.");
}
}
public void testStaticallyTypedItemInForEach() {
assertEquals("1234", test("StringBuffer sbuf = new StringBuffer(); foreach (int i : new int[] { 1,2,3,4 }) { sbuf.append(i); }; sbuf.toString()"));
}
public void testStaticallyTypedLong() {
assertEquals(10l, test("10l"));
}
public void testCompileTimeCoercion() {
ParserContext ctx = new ParserContext();
ctx.setStrongTyping(true);
ctx.addInput("foo", Foo.class);
CompiledExpression c = new ExpressionCompiler("foo.bar.woof == 'true'").compile(ctx);
assertEquals(true, MVEL.executeExpression(c, createTestMap()));
}
}
| unit tests cleaned up | src/test/java/org/mvel/tests/core/CoreConfidenceTests.java | unit tests cleaned up |
|
Java | apache-2.0 | ee81d12f01e8dbf8c7f25ac5e3e03f6735836cfd | 0 | slide-lig/TopPI,slide-lig/TopPI | package fr.liglab.mining.internals;
import java.util.Arrays;
import java.util.Calendar;
import fr.liglab.mining.internals.FrequentsIterator;
import fr.liglab.mining.internals.Dataset.TransactionsIterable;
import fr.liglab.mining.internals.Selector.WrongFirstParentException;
import fr.liglab.mining.io.FileReader;
import fr.liglab.mining.util.ItemsetsFactory;
import gnu.trove.map.hash.TIntIntHashMap;
/**
* Represents an LCM recursion step. It also acts as a Dataset factory.
*/
public final class ExplorationStep implements Cloneable {
public static boolean verbose = false;
public static boolean ultraVerbose = false;
public final static String KEY_VIEW_SUPPORT_THRESHOLD = "toplcm.threshold.view";
public final static String KEY_LONG_TRANSACTIONS_THRESHOLD = "toplcm.threshold.long";
/**
* @see longTransactionsMode
*/
static int LONG_TRANSACTION_MODE_THRESHOLD = Integer.parseInt(
System.getProperty(KEY_LONG_TRANSACTIONS_THRESHOLD, "2000"));
/**
* When projecting on an item having a support count above
* VIEW_SUPPORT_THRESHOLD%, projection will be a DatasetView
*/
static double VIEW_SUPPORT_THRESHOLD = Double.parseDouble(
System.getProperty(KEY_VIEW_SUPPORT_THRESHOLD, "0.15"));
/**
* When set to true we stick to a complete LCMv2 implementation, with predictive
* prefix-preservation tests and compressions at all steps.
* Setting this to false is better when mining top-k-per-item patterns.
*/
public static boolean LCM_STYLE = true;
/**
* closure of parent's pattern UNION extension
*/
public final int[] pattern;
/**
* Extension item that led to this recursion step. Already included in
* "pattern".
*/
public final int core_item;
public final Dataset dataset;
public final Counters counters;
/**
* Selectors chain - may be null when empty
*/
protected Selector selectChain;
protected final FrequentsIterator candidates;
/**
* When an extension fails the first-parent test, it ends up in this map. Keys
* are non-first-parent items associated to their actual first parent.
*/
private final TIntIntHashMap failedFPTests;
private final boolean predictiveFPTestMode;
/**
* Start exploration on a dataset contained in a file.
*
* @param minimumSupport
* @param path
* to an input file in ASCII format. Each line should be a
* transaction containing space-separated item IDs.
*/
public ExplorationStep(int minimumSupport, String path) {
this.core_item = Integer.MAX_VALUE;
this.selectChain = null;
this.predictiveFPTestMode = false;
FileReader reader = new FileReader(path);
this.counters = new Counters(minimumSupport, reader);
reader.close(this.counters.renaming);
this.pattern = this.counters.closure;
this.dataset = new Dataset(this.counters, reader);
this.candidates = this.counters.getExtensionsIterator();
this.failedFPTests = new TIntIntHashMap();
}
private ExplorationStep(int[] pattern, int core_item, Dataset dataset, Counters counters, Selector selectChain,
FrequentsIterator candidates, TIntIntHashMap failedFPTests, boolean predictiveFPTestMode) {
super();
this.pattern = pattern;
this.core_item = core_item;
this.dataset = dataset;
this.counters = counters;
this.selectChain = selectChain;
this.candidates = candidates;
this.failedFPTests = failedFPTests;
this.predictiveFPTestMode = predictiveFPTestMode;
}
/**
* Finds an extension for current pattern in current dataset and returns the
* corresponding ExplorationStep (extensions are enumerated by ascending
* item IDs - in internal rebasing). Returns null when all valid extensions
* have been generated.
*/
public ExplorationStep next() {
if (this.candidates == null) {
return null;
}
while (true) {
int candidate = this.candidates.next();
if (candidate < 0) {
return null;
}
try {
if (this.selectChain == null || this.selectChain.select(candidate, this)) {
TransactionsIterable support = this.dataset.getSupport(candidate);
// System.out.println("extending "+Arrays.toString(this.pattern)+
// " with "+
// candidate+" ("+this.counters.getReverseRenaming()[candidate]+")");
Counters candidateCounts = new Counters(this.counters.minSupport, support.iterator(), candidate,
this.dataset.getIgnoredItems(), this.counters.maxFrequent);
int greatest = Integer.MIN_VALUE;
for (int i = 0; i < candidateCounts.closure.length; i++) {
if (candidateCounts.closure[i] > greatest) {
greatest = candidateCounts.closure[i];
}
}
if (greatest > candidate) {
throw new WrongFirstParentException(candidate, greatest);
}
// instanciateDataset may choose to compress renaming - if
// not, at least it's set for now.
candidateCounts.reuseRenaming(this.counters.reverseRenaming);
return new ExplorationStep(this, candidate, candidateCounts, support);
}
} catch (WrongFirstParentException e) {
addFailedFPTest(e.extension, e.firstParent);
}
}
}
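// Illustrative usage sketch (not part of the original source; the variable names "root",
// "stack" and the threshold/path values are hypothetical): callers typically drive next()
// as a depth-first enumeration of closed patterns.
//
//   int minimumSupport = 2; // hypothetical threshold
//   ExplorationStep root = new ExplorationStep(minimumSupport, "transactions.dat");
//   java.util.ArrayDeque<ExplorationStep> stack = new java.util.ArrayDeque<ExplorationStep>();
//   stack.push(root);
//   while (!stack.isEmpty()) {
//       ExplorationStep child = stack.peek().next();
//       if (child == null) {
//           stack.pop();          // this step has no further valid extensions
//       } else {
//           // child.pattern holds the closed itemset discovered at this step
//           stack.push(child);
//       }
//   }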
/**
* Instantiate state for a valid extension.
*
* @param parent
* @param extension
* a first-parent extension from parent step
* @param candidateCounts
* extension's counters from parent step
* @param support
* previously-computed extension's support
*/
protected ExplorationStep(ExplorationStep parent, int extension, Counters candidateCounts,
TransactionsIterable support) {
this.core_item = extension;
this.counters = candidateCounts;
int[] reverseRenaming = parent.counters.reverseRenaming;
if (verbose) {
if (parent.pattern.length == 0 || ultraVerbose) {
System.err.format("{\"time\":\"%1$tY/%1$tm/%1$td %1$tk:%1$tM:%1$tS\",\"thread\":%2$d,\"pattern\":%3$s,\"extension_internal\":%4$d,\"extension\":%5$d}\n",
Calendar.getInstance(), Thread.currentThread().getId(), Arrays.toString(parent.pattern),
extension, reverseRenaming[extension]);
}
}
this.pattern = ItemsetsFactory
.extendRename(candidateCounts.closure, extension, parent.pattern, reverseRenaming);
if (this.counters.nbFrequents == 0 || this.counters.distinctTransactionsCount == 0) {
this.candidates = null;
this.failedFPTests = null;
this.selectChain = null;
this.dataset = null;
this.predictiveFPTestMode = false;
} else {
this.failedFPTests = new TIntIntHashMap();
if (parent.selectChain == null) {
this.selectChain = null;
} else {
this.selectChain = parent.selectChain.copy();
}
// ! \\ From here, order is important
if (parent.predictiveFPTestMode) {
this.predictiveFPTestMode = true;
} else {
final int averageLen = candidateCounts.distinctTransactionLengthSum
/ candidateCounts.distinctTransactionsCount;
this.predictiveFPTestMode = LCM_STYLE || averageLen > LONG_TRANSACTION_MODE_THRESHOLD;
if (this.predictiveFPTestMode) {
this.selectChain = new FirstParentTest(this.selectChain);
}
}
// indeed, instantiateDataset is influenced by longTransactionsMode
this.dataset = instanciateDataset(parent, support);
// and instanciateDataset may choose to trigger some renaming in counters
this.candidates = this.counters.getExtensionsIterator();
}
}
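// Descriptive note (comment added for readability, not in the original source): the method
// below chooses between a lightweight DatasetView (when the projected support is a large
// fraction of the parent dataset and no predictive first-parent test is needed) and a fully
// copied Dataset built over renamed transactions, optionally compressed in LCM style.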
private Dataset instanciateDataset(ExplorationStep parent, TransactionsIterable support) {
final double supportRate = this.counters.distinctTransactionsCount
/ (double) parent.dataset.getStoredTransactionsCount();
if (!this.predictiveFPTestMode && (supportRate) > VIEW_SUPPORT_THRESHOLD) {
return new DatasetView(parent.dataset, this.counters, support, this.core_item);
} else {
final int[] renaming = this.counters.compressRenaming(parent.counters.getReverseRenaming());
TransactionsRenamingDecorator filtered = new TransactionsRenamingDecorator(support.iterator(), renaming);
final int tidsLimit = this.predictiveFPTestMode ? Integer.MAX_VALUE : this.counters.getMaxCandidate()+1;
try {
Dataset dataset = new Dataset(this.counters, filtered, tidsLimit);
if (LCM_STYLE) {
dataset.compress(this.core_item);
}
return dataset;
} catch (ArrayIndexOutOfBoundsException e) {
System.out.println("WAT core_item = "+this.core_item);
e.printStackTrace();
System.exit(1);
}
return null;
}
}
public int getFailedFPTest(final int item) {
synchronized (this.failedFPTests) {
return this.failedFPTests.get(item);
}
}
private void addFailedFPTest(final int item, final int firstParent) {
synchronized (this.failedFPTests) {
this.failedFPTests.put(item, firstParent);
}
}
public void appendSelector(Selector s) {
if (this.selectChain == null) {
this.selectChain = s;
} else {
this.selectChain = this.selectChain.append(s);
}
}
public int getCatchedWrongFirstParentCount() {
if (this.failedFPTests == null) {
return 0;
} else {
return this.failedFPTests.size();
}
}
public ExplorationStep copy() {
return new ExplorationStep(pattern, core_item, dataset.clone(), counters.clone(), selectChain, candidates,
failedFPTests, predictiveFPTestMode);
}
public Progress getProgression() {
return new Progress();
}
public final class Progress {
public final int current;
public final int last;
protected Progress() {
this.current = candidates.peek();
this.last = candidates.last();
}
}
}
| src/main/java/fr/liglab/mining/internals/ExplorationStep.java | package fr.liglab.mining.internals;
import java.util.Arrays;
import java.util.Calendar;
import fr.liglab.mining.internals.FrequentsIterator;
import fr.liglab.mining.internals.Dataset.TransactionsIterable;
import fr.liglab.mining.internals.Selector.WrongFirstParentException;
import fr.liglab.mining.io.FileReader;
import fr.liglab.mining.util.ItemsetsFactory;
import gnu.trove.map.hash.TIntIntHashMap;
/**
* Represents an LCM recursion step. It also acts as a Dataset factory.
*/
public final class ExplorationStep implements Cloneable {
public static boolean verbose = false;
public static boolean ultraVerbose = false;
public final static String KEY_VIEW_SUPPORT_THRESHOLD = "toplcm.threshold.view";
public final static String KEY_LONG_TRANSACTIONS_THRESHOLD = "toplcm.threshold.long";
/**
* @see longTransactionsMode
*/
static int LONG_TRANSACTION_MODE_THRESHOLD = Integer.parseInt(
System.getProperty(KEY_LONG_TRANSACTIONS_THRESHOLD, "2000"));
/**
* When projecting on an item having a support count above
* VIEW_SUPPORT_THRESHOLD%, projection will be a DatasetView
*/
static double VIEW_SUPPORT_THRESHOLD = Double.parseDouble(
System.getProperty(KEY_VIEW_SUPPORT_THRESHOLD, "0.15"));
/**
* When set to true we stick to a complete LCMv2 implementation, with predictive
* prefix-preservation tests and compressions at all steps.
* Setting this to false is better when mining top-k-per-item patterns.
*/
public static boolean LCM_STYLE = true;
/**
* closure of parent's pattern UNION extension
*/
public final int[] pattern;
/**
* Extension item that led to this recursion step. Already included in
* "pattern".
*/
public final int core_item;
public final Dataset dataset;
public final Counters counters;
/**
* Selectors chain - may be null when empty
*/
protected Selector selectChain;
protected final FrequentsIterator candidates;
/**
* When an extension fails the first-parent test, it ends up in this map. Keys
* are non-first-parent items associated to their actual first parent.
*/
private final TIntIntHashMap failedFPTests;
private final boolean predictiveFPTestMode;
/**
* Start exploration on a dataset contained in a file.
*
* @param minimumSupport
* @param path
* to an input file in ASCII format. Each line should be a
* transaction containing space-separated item IDs.
*/
public ExplorationStep(int minimumSupport, String path) {
this.core_item = Integer.MAX_VALUE;
this.selectChain = null;
this.predictiveFPTestMode = false;
FileReader reader = new FileReader(path);
this.counters = new Counters(minimumSupport, reader);
reader.close(this.counters.renaming);
this.pattern = this.counters.closure;
this.dataset = new Dataset(this.counters, reader);
this.candidates = this.counters.getExtensionsIterator();
this.failedFPTests = new TIntIntHashMap();
}
private ExplorationStep(int[] pattern, int core_item, Dataset dataset, Counters counters, Selector selectChain,
FrequentsIterator candidates, TIntIntHashMap failedFPTests, boolean predictiveFPTestMode) {
super();
this.pattern = pattern;
this.core_item = core_item;
this.dataset = dataset;
this.counters = counters;
this.selectChain = selectChain;
this.candidates = candidates;
this.failedFPTests = failedFPTests;
this.predictiveFPTestMode = predictiveFPTestMode;
}
/**
* Finds an extension for current pattern in current dataset and returns the
* corresponding ExplorationStep (extensions are enumerated by ascending
* item IDs - in internal rebasing). Returns null when all valid extensions
* have been generated.
*/
public ExplorationStep next() {
if (this.candidates == null) {
return null;
}
while (true) {
int candidate = this.candidates.next();
if (candidate < 0) {
return null;
}
try {
if (this.selectChain == null || this.selectChain.select(candidate, this)) {
TransactionsIterable support = this.dataset.getSupport(candidate);
// System.out.println("extending "+Arrays.toString(this.pattern)+
// " with "+
// candidate+" ("+this.counters.getReverseRenaming()[candidate]+")");
Counters candidateCounts = new Counters(this.counters.minSupport, support.iterator(), candidate,
this.dataset.getIgnoredItems(), this.counters.maxFrequent);
int greatest = Integer.MIN_VALUE;
for (int i = 0; i < candidateCounts.closure.length; i++) {
if (candidateCounts.closure[i] > greatest) {
greatest = candidateCounts.closure[i];
}
}
if (greatest > candidate) {
throw new WrongFirstParentException(candidate, greatest);
}
// instanciateDataset may choose to compress renaming - if
// not, at least it's set for now.
candidateCounts.reuseRenaming(this.counters.reverseRenaming);
return new ExplorationStep(this, candidate, candidateCounts, support);
}
} catch (WrongFirstParentException e) {
addFailedFPTest(e.extension, e.firstParent);
}
}
}
/**
* Instantiate state for a valid extension.
*
* @param parent
* @param extension
* a first-parent extension from parent step
* @param candidateCounts
* extension's counters from parent step
* @param support
* previously-computed extension's support
*/
protected ExplorationStep(ExplorationStep parent, int extension, Counters candidateCounts,
TransactionsIterable support) {
this.core_item = extension;
this.counters = candidateCounts;
int[] reverseRenaming = parent.counters.reverseRenaming;
if (verbose) {
if (parent.pattern.length == 0 || ultraVerbose) {
System.err.format("%1$tY/%1$tm/%1$td %1$tk:%1$tM:%1$tS - thread %2$d projecting %3$s with %4$s\n",
Calendar.getInstance(), Thread.currentThread().getId(), Arrays.toString(parent.pattern),
reverseRenaming[extension]);
}
}
this.pattern = ItemsetsFactory
.extendRename(candidateCounts.closure, extension, parent.pattern, reverseRenaming);
if (this.counters.nbFrequents == 0 || this.counters.distinctTransactionsCount == 0) {
this.candidates = null;
this.failedFPTests = null;
this.selectChain = null;
this.dataset = null;
this.predictiveFPTestMode = false;
} else {
this.failedFPTests = new TIntIntHashMap();
if (parent.selectChain == null) {
this.selectChain = null;
} else {
this.selectChain = parent.selectChain.copy();
}
// ! \\ From here, order is important
if (parent.predictiveFPTestMode) {
this.predictiveFPTestMode = true;
} else {
final int averageLen = candidateCounts.distinctTransactionLengthSum
/ candidateCounts.distinctTransactionsCount;
this.predictiveFPTestMode = LCM_STYLE || averageLen > LONG_TRANSACTION_MODE_THRESHOLD;
if (this.predictiveFPTestMode) {
this.selectChain = new FirstParentTest(this.selectChain);
}
}
// indeed, instantiateDataset is influenced by longTransactionsMode
this.dataset = instanciateDataset(parent, support);
// and instanciateDataset may choose to trigger some renaming in counters
this.candidates = this.counters.getExtensionsIterator();
}
}
private Dataset instanciateDataset(ExplorationStep parent, TransactionsIterable support) {
final double supportRate = this.counters.distinctTransactionsCount
/ (double) parent.dataset.getStoredTransactionsCount();
if (!this.predictiveFPTestMode && (supportRate) > VIEW_SUPPORT_THRESHOLD) {
return new DatasetView(parent.dataset, this.counters, support, this.core_item);
} else {
final int[] renaming = this.counters.compressRenaming(parent.counters.getReverseRenaming());
TransactionsRenamingDecorator filtered = new TransactionsRenamingDecorator(support.iterator(), renaming);
final int tidsLimit = this.predictiveFPTestMode ? Integer.MAX_VALUE : this.counters.getMaxCandidate()+1;
try {
Dataset dataset = new Dataset(this.counters, filtered, tidsLimit);
if (LCM_STYLE) {
dataset.compress(this.core_item);
}
return dataset;
} catch (ArrayIndexOutOfBoundsException e) {
System.out.println("WAT core_item = "+this.core_item);
e.printStackTrace();
System.exit(1);
}
return null;
}
}
public int getFailedFPTest(final int item) {
synchronized (this.failedFPTests) {
return this.failedFPTests.get(item);
}
}
private void addFailedFPTest(final int item, final int firstParent) {
synchronized (this.failedFPTests) {
this.failedFPTests.put(item, firstParent);
}
}
public void appendSelector(Selector s) {
if (this.selectChain == null) {
this.selectChain = s;
} else {
this.selectChain = this.selectChain.append(s);
}
}
public int getCatchedWrongFirstParentCount() {
if (this.failedFPTests == null) {
return 0;
} else {
return this.failedFPTests.size();
}
}
public ExplorationStep copy() {
return new ExplorationStep(pattern, core_item, dataset.clone(), counters.clone(), selectChain, candidates,
failedFPTests, predictiveFPTestMode);
}
public Progress getProgression() {
return new Progress();
}
public final class Progress {
public final int current;
public final int last;
protected Progress() {
this.current = candidates.peek();
this.last = candidates.last();
}
}
}
| now options -v and -V speak in JSON too
| src/main/java/fr/liglab/mining/internals/ExplorationStep.java | now options -v and -V speak in JSON too |
|
Java | apache-2.0 | 4cf1e6c853ab69fb338cb6e92916185502eed1db | 0 | apache/pdfbox,kalaspuffar/pdfbox,kalaspuffar/pdfbox,apache/pdfbox | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.graphics.image;
import java.awt.Color;
import java.awt.color.ICC_ColorSpace;
import java.awt.color.ICC_Profile;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.WritableRaster;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Hashtable;
import javax.imageio.ImageIO;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.io.IOUtils;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.graphics.color.PDICCBased;
import org.apache.pdfbox.pdmodel.graphics.color.PDIndexed;
import static org.apache.pdfbox.pdmodel.graphics.image.ValidateXImage.checkIdent;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
public class PNGConverterTest
{
@Before
public void setup()
{
//noinspection ResultOfMethodCallIgnored
parentDir.mkdirs();
}
/**
* This "test" just dumps the list of constants for the PNGConverter CHUNK_??? types, so that
* it can just be copy&pasted into the PNGConverter class.
*/
//@Test
public void dumpChunkTypes()
{
final String[] chunkTypes = { "IHDR", "IDAT", "PLTE", "IEND", "tRNS", "cHRM", "gAMA",
"iCCP", "sBIT", "sRGB", "tEXt", "zTXt", "iTXt", "kBKG", "hIST", "pHYs", "sPLT",
"tIME" };
for (String chunkType : chunkTypes)
{
byte[] bytes = chunkType.getBytes();
assertEquals(4, bytes.length);
System.out.println(String.format("\tprivate static final int CHUNK_" + chunkType
+ " = 0x%02X%02X%02X%02X; // %s: %d %d %d %d", (int) bytes[0] & 0xFF,
(int) bytes[1] & 0xFF, (int) bytes[2] & 0xFF, (int) bytes[3] & 0xFF, chunkType,
(int) bytes[0] & 0xFF, (int) bytes[1] & 0xFF, (int) bytes[2] & 0xFF,
(int) bytes[3] & 0xFF));
}
}
@Test
public void testImageConversionRGB() throws IOException
{
checkImageConvert("png.png");
}
@Test
public void testImageConversionRGBGamma() throws IOException
{
checkImageConvert("png_rgb_gamma.png");
}
@Test
public void testImageConversionRGB16BitICC() throws IOException
{
checkImageConvert("png_rgb_romm_16bit.png");
}
@Test
public void testImageConversionRGBIndexed() throws IOException
{
checkImageConvert("png_indexed.png");
}
@Test
public void testImageConversionRGBIndexedAlpha1Bit() throws IOException
{
checkImageConvert("png_indexed_1bit_alpha.png");
}
@Test
public void testImageConversionRGBIndexedAlpha2Bit() throws IOException
{
checkImageConvert("png_indexed_2bit_alpha.png");
}
@Test
public void testImageConversionRGBIndexedAlpha4Bit() throws IOException
{
checkImageConvert("png_indexed_4bit_alpha.png");
}
@Test
public void testImageConversionRGBIndexedAlpha8Bit() throws IOException
{
checkImageConvert("png_indexed_8bit_alpha.png");
}
@Test
public void testImageConversionRGBAlpha() throws IOException
{
// We can't handle Alpha RGB
checkImageConvertFail("png_alpha_rgb.png");
}
@Test
public void testImageConversionGrayAlpha() throws IOException
{
// We can't handle gray images with alpha either
checkImageConvertFail("png_alpha_gray.png");
}
@Test
public void testImageConversionGray() throws IOException
{
checkImageConvertFail("png_gray.png");
}
@Test
public void testImageConversionGrayGamma() throws IOException
{
checkImageConvertFail("png_gray_with_gama.png");
}
private final File parentDir = new File("target/test-output/graphics/graphics");
private void checkImageConvertFail(String name) throws IOException
{
try (PDDocument doc = new PDDocument())
{
byte[] imageBytes = IOUtils.toByteArray(PNGConverterTest.class.getResourceAsStream(name));
PDImageXObject pdImageXObject = PNGConverter.convertPNGImage(doc, imageBytes);
assertNull(pdImageXObject);
}
}
private void checkImageConvert(String name) throws IOException
{
try (PDDocument doc = new PDDocument())
{
byte[] imageBytes = IOUtils.toByteArray(PNGConverterTest.class.getResourceAsStream(name));
PDImageXObject pdImageXObject = PNGConverter.convertPNGImage(doc, imageBytes);
assertNotNull(pdImageXObject);
ICC_Profile imageProfile = null;
if (pdImageXObject.getColorSpace() instanceof PDICCBased)
{
// Make sure that ICC profile is a valid one
PDICCBased iccColorSpace = (PDICCBased) pdImageXObject.getColorSpace();
imageProfile = ICC_Profile.getInstance(iccColorSpace.getPDStream().toByteArray());
}
PDPage page = new PDPage();
doc.addPage(page);
try (PDPageContentStream contentStream = new PDPageContentStream(doc, page))
{
contentStream.setNonStrokingColor(Color.PINK);
contentStream.addRect(0, 0, page.getCropBox().getWidth(), page.getCropBox().getHeight());
contentStream.fill();
contentStream.drawImage(pdImageXObject, 0, 0, pdImageXObject.getWidth(),
pdImageXObject.getHeight());
}
doc.save(new File(parentDir, name + ".pdf"));
BufferedImage image = pdImageXObject.getImage();
BufferedImage expectedImage = ImageIO.read(new ByteArrayInputStream(imageBytes));
if (imageProfile != null && expectedImage.getColorModel().getColorSpace().isCS_sRGB())
{
// The image has an embedded ICC Profile, but the default java PNG
// reader does not correctly read that.
expectedImage = getImageWithProfileData(expectedImage, imageProfile);
}
checkIdent(expectedImage, image);
}
}
public static BufferedImage getImageWithProfileData(BufferedImage sourceImage,
ICC_Profile realProfile)
{
Hashtable<String, Object> properties = new Hashtable<>();
String[] propertyNames = sourceImage.getPropertyNames();
if (propertyNames != null)
{
for (String propertyName : propertyNames)
{
properties.put(propertyName, sourceImage.getProperty(propertyName));
}
}
ComponentColorModel oldColorModel = (ComponentColorModel) sourceImage.getColorModel();
boolean hasAlpha = oldColorModel.hasAlpha();
int transparency = oldColorModel.getTransparency();
boolean alphaPremultiplied = oldColorModel.isAlphaPremultiplied();
WritableRaster raster = sourceImage.getRaster();
int dataType = raster.getDataBuffer().getDataType();
int[] componentSize = oldColorModel.getComponentSize();
final ColorModel colorModel = new ComponentColorModel(new ICC_ColorSpace(realProfile),
componentSize, hasAlpha, alphaPremultiplied, transparency, dataType);
return new BufferedImage(colorModel, raster, sourceImage.isAlphaPremultiplied(),
properties);
}
@Test
public void testCheckConverterState()
{
assertFalse(PNGConverter.checkConverterState(null));
PNGConverter.PNGConverterState state = new PNGConverter.PNGConverterState();
assertFalse(PNGConverter.checkConverterState(state));
PNGConverter.Chunk invalidChunk = new PNGConverter.Chunk();
invalidChunk.bytes = new byte[0];
assertFalse(PNGConverter.checkChunkSane(invalidChunk));
// Valid Dummy Chunk
PNGConverter.Chunk validChunk = new PNGConverter.Chunk();
validChunk.bytes = new byte[16];
validChunk.start = 4;
validChunk.length = 8;
validChunk.crc = 2077607535;
assertTrue(PNGConverter.checkChunkSane(validChunk));
state.IHDR = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.IDATs = Collections.singletonList(validChunk);
assertFalse(PNGConverter.checkConverterState(state));
state.IHDR = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.IDATs = new ArrayList<>();
assertFalse(PNGConverter.checkConverterState(state));
state.IDATs = Collections.singletonList(validChunk);
assertTrue(PNGConverter.checkConverterState(state));
state.PLTE = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.PLTE = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.cHRM = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.cHRM = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.tRNS = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.tRNS = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.iCCP = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.iCCP = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.sRGB = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.sRGB = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.gAMA = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.gAMA = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.IDATs = Arrays.asList(validChunk, invalidChunk);
assertFalse(PNGConverter.checkConverterState(state));
}
@Test
public void testChunkSane()
{
PNGConverter.Chunk chunk = new PNGConverter.Chunk();
assertTrue(PNGConverter.checkChunkSane(null));
chunk.bytes = "IHDRsomedummyvaluesDummyValuesAtEnd".getBytes();
chunk.length = 19;
assertEquals(35, chunk.bytes.length);
assertEquals("IHDRsomedummyvalues", new String(chunk.getData()));
assertFalse(PNGConverter.checkChunkSane(chunk));
chunk.start = 4;
assertEquals("somedummyvaluesDumm", new String(chunk.getData()));
assertFalse(PNGConverter.checkChunkSane(chunk));
chunk.crc = -1729802258;
assertTrue(PNGConverter.checkChunkSane(chunk));
chunk.start = 6;
assertFalse(PNGConverter.checkChunkSane(chunk));
chunk.length = 60;
assertFalse(PNGConverter.checkChunkSane(chunk));
}
@Test
public void testCRCImpl()
{
byte[] b1 = "Hello World!".getBytes();
assertEquals(472456355, PNGConverter.crc(b1, 0, b1.length));
assertEquals(-632335482, PNGConverter.crc(b1, 2, b1.length - 4));
}
@Test
public void testMapPNGRenderIntent()
{
assertEquals(COSName.PERCEPTUAL, PNGConverter.mapPNGRenderIntent(0));
assertEquals(COSName.RELATIVE_COLORIMETRIC, PNGConverter.mapPNGRenderIntent(1));
assertEquals(COSName.SATURATION, PNGConverter.mapPNGRenderIntent(2));
assertEquals(COSName.ABSOLUTE_COLORIMETRIC, PNGConverter.mapPNGRenderIntent(3));
assertNull(PNGConverter.mapPNGRenderIntent(-1));
assertNull(PNGConverter.mapPNGRenderIntent(4));
}
/**
* Test code coverage for /Intent /Perceptual and for sRGB icc profile in indexed colorspace.
*
* @throws IOException
*/
@Test
public void testImageConversionIntentIndexed() throws IOException
{
try (PDDocument doc = new PDDocument())
{
byte[] imageBytes = IOUtils.toByteArray(PNGConverterTest.class.getResourceAsStream("929316.png"));
PDImageXObject pdImageXObject = PNGConverter.convertPNGImage(doc, imageBytes);
assertEquals(COSName.PERCEPTUAL, pdImageXObject.getCOSObject().getItem(COSName.INTENT));
            // Check that this image gets an indexed colorspace whose base colorspace is an sRGB ICC-based colorspace
PDIndexed indexedColorspace = (PDIndexed) pdImageXObject.getColorSpace();
PDICCBased iccColorspace = (PDICCBased) indexedColorspace.getBaseColorSpace();
// validity of ICC CS is tested in checkImageConvert
assertTrue(iccColorspace.issRGB());
}
checkImageConvert("929316.png");
}
}
| pdfbox/src/test/java/org/apache/pdfbox/pdmodel/graphics/image/PNGConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.graphics.image;
import java.awt.Color;
import java.awt.color.ICC_ColorSpace;
import java.awt.color.ICC_Profile;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.WritableRaster;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Hashtable;
import javax.imageio.ImageIO;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.io.IOUtils;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.graphics.color.PDICCBased;
import org.apache.pdfbox.pdmodel.graphics.color.PDIndexed;
import static org.apache.pdfbox.pdmodel.graphics.image.ValidateXImage.checkIdent;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
public class PNGConverterTest
{
@Before
public void setup()
{
//noinspection ResultOfMethodCallIgnored
parentDir.mkdirs();
}
/**
* This "test" just dumps the list of constants for the PNGConverter CHUNK_??? types, so that
* it can just be copy&pasted into the PNGConverter class.
*/
//@Test
public void dumpChunkTypes()
{
final String[] chunkTypes = { "IHDR", "IDAT", "PLTE", "IEND", "tRNS", "cHRM", "gAMA",
"iCCP", "sBIT", "sRGB", "tEXt", "zTXt", "iTXt", "kBKG", "hIST", "pHYs", "sPLT",
"tIME" };
for (String chunkType : chunkTypes)
{
byte[] bytes = chunkType.getBytes();
assertEquals(4, bytes.length);
System.out.println(String.format("\tprivate static final int CHUNK_" + chunkType
+ " = 0x%02X%02X%02X%02X; // %s: %d %d %d %d", (int) bytes[0] & 0xFF,
(int) bytes[1] & 0xFF, (int) bytes[2] & 0xFF, (int) bytes[3] & 0xFF, chunkType,
(int) bytes[0] & 0xFF, (int) bytes[1] & 0xFF, (int) bytes[2] & 0xFF,
(int) bytes[3] & 0xFF));
}
}
@Test
public void testImageConversionRGB() throws IOException
{
checkImageConvert("png.png");
}
@Test
public void testImageConversionRGBGamma() throws IOException
{
checkImageConvert("png_rgb_gamma.png");
}
@Test
public void testImageConversionRGB16BitICC() throws IOException
{
checkImageConvert("png_rgb_romm_16bit.png");
}
@Test
public void testImageConversionRGBIndexed() throws IOException
{
checkImageConvert("png_indexed.png");
}
@Test
public void testImageConversionRGBIndexedAlpha1Bit() throws IOException
{
checkImageConvert("png_indexed_1bit_alpha.png");
}
@Test
public void testImageConversionRGBIndexedAlpha2Bit() throws IOException
{
checkImageConvert("png_indexed_2bit_alpha.png");
}
@Test
public void testImageConversionRGBIndexedAlpha4Bit() throws IOException
{
checkImageConvert("png_indexed_4bit_alpha.png");
}
@Test
public void testImageConversionRGBIndexedAlpha8Bit() throws IOException
{
checkImageConvert("png_indexed_8bit_alpha.png");
}
@Test
public void testImageConversionRGBAlpha() throws IOException
{
// We can't handle Alpha RGB
checkImageConvertFail("png_alpha_rgb.png");
}
@Test
public void testImageConversionGrayAlpha() throws IOException
{
        // We can't handle Alpha Gray
checkImageConvertFail("png_alpha_gray.png");
}
@Test
public void testImageConversionGray() throws IOException
{
checkImageConvertFail("png_gray.png");
}
@Test
public void testImageConversionGrayGamma() throws IOException
{
checkImageConvertFail("png_gray_with_gama.png");
}
private final File parentDir = new File("target/test-output/graphics/graphics");
private void checkImageConvertFail(String name) throws IOException
{
try (PDDocument doc = new PDDocument())
{
byte[] imageBytes = IOUtils.toByteArray(PNGConverterTest.class.getResourceAsStream(name));
PDImageXObject pdImageXObject = PNGConverter.convertPNGImage(doc, imageBytes);
assertNull(pdImageXObject);
}
}
private void checkImageConvert(String name) throws IOException
{
try (PDDocument doc = new PDDocument())
{
byte[] imageBytes = IOUtils.toByteArray(PNGConverterTest.class.getResourceAsStream(name));
PDImageXObject pdImageXObject = PNGConverter.convertPNGImage(doc, imageBytes);
assertNotNull(pdImageXObject);
ICC_Profile imageProfile = null;
if (pdImageXObject.getColorSpace() instanceof PDICCBased)
{
// Make sure that ICC profile is a valid one
PDICCBased iccColorSpace = (PDICCBased) pdImageXObject.getColorSpace();
imageProfile = ICC_Profile.getInstance(iccColorSpace.getPDStream().toByteArray());
}
PDPage page = new PDPage();
doc.addPage(page);
try (PDPageContentStream contentStream = new PDPageContentStream(doc, page))
{
contentStream.setNonStrokingColor(Color.PINK);
contentStream.addRect(0, 0, page.getCropBox().getWidth(), page.getCropBox().getHeight());
contentStream.fill();
contentStream.drawImage(pdImageXObject, 0, 0, pdImageXObject.getWidth(),
pdImageXObject.getHeight());
}
doc.save(new File(parentDir, name + ".pdf"));
BufferedImage image = pdImageXObject.getImage();
BufferedImage expectedImage = ImageIO.read(new ByteArrayInputStream(imageBytes));
if (imageProfile != null && expectedImage.getColorModel().getColorSpace().isCS_sRGB())
{
// The image has an embedded ICC Profile, but the default java PNG
// reader does not correctly read that.
expectedImage = getImageWithProfileData(expectedImage, imageProfile);
}
checkIdent(expectedImage, image);
}
}
public static BufferedImage getImageWithProfileData(BufferedImage sourceImage,
ICC_Profile realProfile)
{
Hashtable<String, Object> properties = new Hashtable<>();
String[] propertyNames = sourceImage.getPropertyNames();
if (propertyNames != null)
{
for (String propertyName : propertyNames)
{
properties.put(propertyName, sourceImage.getProperty(propertyName));
}
}
ComponentColorModel oldColorModel = (ComponentColorModel) sourceImage.getColorModel();
boolean hasAlpha = oldColorModel.hasAlpha();
int transparency = oldColorModel.getTransparency();
boolean alphaPremultiplied = oldColorModel.isAlphaPremultiplied();
WritableRaster raster = sourceImage.getRaster();
int dataType = raster.getDataBuffer().getDataType();
int[] componentSize = oldColorModel.getComponentSize();
final ColorModel colorModel = new ComponentColorModel(new ICC_ColorSpace(realProfile),
componentSize, hasAlpha, alphaPremultiplied, transparency, dataType);
return new BufferedImage(colorModel, raster, sourceImage.isAlphaPremultiplied(),
properties);
}
@Test
public void testCheckConverterState()
{
assertFalse(PNGConverter.checkConverterState(null));
PNGConverter.PNGConverterState state = new PNGConverter.PNGConverterState();
assertFalse(PNGConverter.checkConverterState(state));
PNGConverter.Chunk invalidChunk = new PNGConverter.Chunk();
invalidChunk.bytes = new byte[0];
assertFalse(PNGConverter.checkChunkSane(invalidChunk));
// Valid Dummy Chunk
PNGConverter.Chunk validChunk = new PNGConverter.Chunk();
validChunk.bytes = new byte[16];
validChunk.start = 4;
validChunk.length = 8;
validChunk.crc = 2077607535;
assertTrue(PNGConverter.checkChunkSane(validChunk));
state.IHDR = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.IDATs = Collections.singletonList(validChunk);
assertFalse(PNGConverter.checkConverterState(state));
state.IHDR = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.IDATs = new ArrayList<>();
assertFalse(PNGConverter.checkConverterState(state));
state.IDATs = Collections.singletonList(validChunk);
assertTrue(PNGConverter.checkConverterState(state));
state.PLTE = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.PLTE = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.cHRM = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.cHRM = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.tRNS = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.tRNS = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.iCCP = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.iCCP = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.sRGB = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.sRGB = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.gAMA = invalidChunk;
assertFalse(PNGConverter.checkConverterState(state));
state.gAMA = validChunk;
assertTrue(PNGConverter.checkConverterState(state));
state.IDATs = Arrays.asList(validChunk, invalidChunk);
assertFalse(PNGConverter.checkConverterState(state));
}
@Test
public void testChunkSane()
{
PNGConverter.Chunk chunk = new PNGConverter.Chunk();
assertTrue(PNGConverter.checkChunkSane(null));
chunk.bytes = "IHDRsomedummyvaluesDummyValuesAtEnd".getBytes();
chunk.length = 19;
assertEquals(35, chunk.bytes.length);
assertEquals("IHDRsomedummyvalues", new String(chunk.getData()));
assertFalse(PNGConverter.checkChunkSane(chunk));
chunk.start = 4;
assertEquals("somedummyvaluesDumm", new String(chunk.getData()));
assertFalse(PNGConverter.checkChunkSane(chunk));
chunk.crc = -1729802258;
assertTrue(PNGConverter.checkChunkSane(chunk));
chunk.start = 6;
assertFalse(PNGConverter.checkChunkSane(chunk));
chunk.length = 60;
assertFalse(PNGConverter.checkChunkSane(chunk));
}
@Test
public void testCRCImpl()
{
byte[] b1 = "Hello World!".getBytes();
assertEquals(472456355, PNGConverter.crc(b1, 0, b1.length));
assertEquals(-632335482, PNGConverter.crc(b1, 2, b1.length - 4));
}
@Test
public void testMapPNGRenderIntent()
{
assertEquals(COSName.PERCEPTUAL, PNGConverter.mapPNGRenderIntent(0));
assertEquals(COSName.RELATIVE_COLORIMETRIC, PNGConverter.mapPNGRenderIntent(1));
assertEquals(COSName.SATURATION, PNGConverter.mapPNGRenderIntent(2));
assertEquals(COSName.ABSOLUTE_COLORIMETRIC, PNGConverter.mapPNGRenderIntent(3));
assertNull(PNGConverter.mapPNGRenderIntent(-1));
assertNull(PNGConverter.mapPNGRenderIntent(4));
}
/**
* Test code coverage for /Intent /Perceptual and for sRGB icc profile in indexed colorspace.
*
* @throws IOException
*/
@Test
public void testImageConversionIntentIndexed() throws IOException
{
try (PDDocument doc = new PDDocument())
{
byte[] imageBytes = IOUtils.toByteArray(PNGConverterTest.class.getResourceAsStream("929316.png"));
PDImageXObject pdImageXObject = PNGConverter.convertPNGImage(doc, imageBytes);
assertEquals(COSName.PERCEPTUAL, pdImageXObject.getCOSObject().getItem(COSName.INTENT));
            // Check that this image gets an indexed colorspace whose base colorspace is an sRGB ICC-based colorspace
PDIndexed indexedColorspace = (PDIndexed) pdImageXObject.getColorSpace();
PDICCBased iccColorspace = (PDICCBased) indexedColorspace.getBaseColorSpace();
// validity of ICC CS is tested in checkImageConvert
assertTrue(iccColorspace.isIsRGB());
}
checkImageConvert("929316.png");
}
}
| PDFBOX-4847: correct method name
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1881095 13f79535-47bb-0310-9956-ffa450edef68
| pdfbox/src/test/java/org/apache/pdfbox/pdmodel/graphics/image/PNGConverterTest.java | PDFBOX-4847: correct method name |
|
Java | apache-2.0 | 1ee0a17050929b8c5ee8cfa6e1f451e90af04523 | 0 | hekate-io/hekate | /*
* Copyright 2018 The Hekate Project
*
* The Hekate Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.hekate.dev;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import static java.util.stream.Collectors.toList;
/**
* Utility for source code examples inclusion into javadocs.
*/
public final class CodeSamplesProcessorMain {
private static final Charset UTF_8 = Charset.forName("UTF-8");
private static final String NL = System.lineSeparator();
private CodeSamplesProcessorMain() {
// No-op.
}
/**
* Runs this utility.
*
     * @param args First arg - path to javadocs folder; second arg - list of folders with sample sources, separated by ';'
*
* @throws IOException File reading error.
*/
public static void main(String[] args) throws IOException {
String docPath = args[0];
String sourcePath = args[1];
process(docPath, sourcePath);
}
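    // Usage sketch (hypothetical paths, not taken from the actual build scripts): the utility is run with
    // the javadoc output directory first and a ';'-separated list of sample source directories second, e.g.
    //   java io.hekate.dev.CodeSamplesProcessorMain target/apidocs "src/test/java;samples/src/test/java"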
private static void process(String javadocSourcePath, String samplesSourcePath) throws IOException {
File javadocSource = new File(javadocSourcePath);
List<File> samplesSources = getSampleSources(samplesSourcePath);
say("Processing sample code:");
say(" samples - " + samplesSources.stream().map(File::getAbsolutePath).collect(toList()));
say(" target - " + javadocSource.getCanonicalPath());
if (!javadocSource.exists()) {
say("Skipped processing since javadoc source path doesn't exist [path=" + javadocSourcePath + ']');
return;
}
if (!javadocSource.isDirectory()) {
throw new IllegalArgumentException("Javadoc source path is not a directory [path=" + javadocSource.getAbsolutePath() + ']');
}
StringBuilder buf = new StringBuilder();
processDir(javadocSource, samplesSources, buf);
}
private static void processDir(File javadocSrcDir, List<File> samplesSrcDirs, StringBuilder buf) throws IOException {
File[] files = javadocSrcDir.listFiles(pathname -> pathname.isDirectory() || pathname.getName().endsWith(".html"));
if (files != null) {
for (File file : files) {
if (file.isDirectory()) {
processDir(file, samplesSrcDirs, buf);
} else {
processFile(file, samplesSrcDirs, buf);
buf.setLength(0);
}
}
}
}
private static void processFile(File javadocSource, List<File> samplesSrcDirs, StringBuilder buf) throws IOException {
boolean rewrite = false;
try (BufferedReader reader = Files.newBufferedReader(javadocSource.toPath(), UTF_8)) {
for (String s = reader.readLine(); s != null; s = reader.readLine()) {
int start;
int end = -1;
String pattern = "${source:";
start = s.indexOf(pattern);
if (start >= 0) {
start += pattern.length();
end = s.indexOf('}', start);
}
if (start >= 0 && end >= 0) {
String path = s.substring(start, end).trim();
String section = null;
int splitIdx = path.indexOf('#');
if (splitIdx >= 0) {
section = path.substring(splitIdx + 1).trim();
path = path.substring(0, splitIdx).trim();
}
if (path.isEmpty() || section != null && section.isEmpty()) {
throw new IllegalStateException("Failed to parse 'source' directive: " + s);
}
String brush = resolveBrush(path);
if (brush == null) {
throw new IllegalArgumentException("Failed to resolve brush from path '" + path + "'.");
}
File code = null;
for (File dir : samplesSrcDirs) {
code = new File(dir, path);
if (code.isFile()) {
break;
} else {
code = null;
}
}
if (code == null) {
throw new FileNotFoundException("Failed to find sample source file: " + path);
}
buf.append("<div class=\"doc_source\">").append(NL);
buf.append("<pre><code class=\"").append(brush).append("\">").append(NL);
writeSourceCode(code, section, buf, javadocSource);
buf.append("</code></pre>").append(NL);
buf.append("</div>").append(NL);
rewrite = true;
} else {
buf.append(s).append(NL);
}
}
}
if (rewrite) {
write(buf, javadocSource);
}
}
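    // Illustration (hypothetical file and section names; the mechanics follow the patterns used above):
    // a javadoc line containing
    //   ${source: my/sample/FooExample.java#foo_section}
    // is replaced with the lines of FooExample.java found between the markers
    //   // Start: foo_section
    //   // End: foo_section
    // and wrapped in a <div class="doc_source"><pre><code class="java"> block; the "java" brush is
    // derived from the file extension (see resolveBrush).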
private static void writeSourceCode(File src, String section, StringBuilder out, File requester) throws IOException {
List<String> sectionLines = new LinkedList<>();
Integer minOffset = null;
boolean sectionFound = section == null;
Pattern start = Pattern.compile("\\s*((//)|(<!--)).*Start:\\s*" + section + ".*");
Pattern end = Pattern.compile("\\s*((//)|(<!--)).*End:\\s*" + section + ".*");
for (String line : Files.readAllLines(Paths.get(src.getAbsolutePath()))) {
if (sectionFound) {
if (end.matcher(line).matches()) {
break;
}
String realLine = line.trim();
if (realLine.isEmpty()) {
sectionLines.add(realLine);
} else {
int offset = getWhitespacesOffset(line);
if (minOffset == null || offset < minOffset) {
minOffset = offset;
}
sectionLines.add(line);
}
} else {
if (start.matcher(line).matches()) {
sectionFound = true;
}
}
}
if (!sectionFound) {
throw new IllegalStateException("Couldn't find '" + section + "' section in file "
                + src.getAbsolutePath() + " (required for " + requester.getAbsolutePath() + ')');
}
Pattern lt = Pattern.compile("<");
Pattern gt = Pattern.compile(">");
Pattern publicStaticClass = Pattern.compile("public static class");
for (String line : sectionLines) {
String trimmed;
if (!line.isEmpty() && minOffset != null && minOffset > 0) {
trimmed = line.substring(minOffset);
} else {
trimmed = line;
}
// Replace 'public static class' with 'public class' since many code examples are implemented as inner classes.
trimmed = publicStaticClass.matcher(trimmed).replaceAll("public class");
            out.append(lt.matcher(gt.matcher(trimmed).replaceAll("&gt;")).replaceAll("&lt;")).append(NL);
}
}
private static void write(StringBuilder src, File target) throws IOException {
try (BufferedWriter out = Files.newBufferedWriter(target.toPath(), UTF_8)) {
out.append(src);
out.flush();
}
}
@SuppressWarnings("UseOfSystemOutOrSystemErr")
private static void say(String msg) {
System.out.println(msg);
}
private static List<File> getSampleSources(String samplesSourcePath) {
String[] tokens = samplesSourcePath.split(";");
List<File> files = new ArrayList<>();
for (String token : tokens) {
File src = new File(token.trim());
if (src.isDirectory()) {
files.add(src);
}
}
return files;
}
private static String resolveBrush(String path) {
int dot = path.lastIndexOf('.');
if (dot >= 0 && dot < path.length() - 1) {
return path.substring(dot + 1);
}
return null;
}
private static int getWhitespacesOffset(String s) {
for (int i = 0; i < s.length(); i++) {
if (!Character.isSpaceChar(s.charAt(i))) {
return i;
}
}
return 0;
}
}
| hekate-dev-build/src/main/java/io/hekate/dev/CodeSamplesProcessorMain.java | /*
* Copyright 2018 The Hekate Project
*
* The Hekate Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.hekate.dev;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import static java.util.stream.Collectors.toList;
/**
* Utility for source code examples inclusion into javadocs.
*/
public final class CodeSamplesProcessorMain {
private static final String NL = System.lineSeparator();
private CodeSamplesProcessorMain() {
// No-op.
}
/**
* Runs this utility.
*
     * @param args First arg - path to javadocs folder; second arg - list of folders with sample sources, separated by ';'
*
* @throws IOException File reading error.
*/
public static void main(String[] args) throws IOException {
String docPath = args[0];
String sourcePath = args[1];
process(docPath, sourcePath);
}
private static void process(String javadocSourcePath, String samplesSourcePath) throws IOException {
File javadocSource = new File(javadocSourcePath);
List<File> samplesSources = getSampleSources(samplesSourcePath);
say("Processing sample code:");
say(" samples - " + samplesSources.stream().map(File::getAbsolutePath).collect(toList()));
say(" target - " + javadocSource.getCanonicalPath());
if (!javadocSource.exists()) {
say("Skipped processing since javadoc source path doesn't exist [path=" + javadocSourcePath + ']');
return;
}
if (!javadocSource.isDirectory()) {
throw new IllegalArgumentException("Javadoc source path is not a directory [path=" + javadocSource.getAbsolutePath() + ']');
}
StringBuilder buf = new StringBuilder();
processDir(javadocSource, samplesSources, buf);
}
private static void processDir(File javadocSrcDir, List<File> samplesSrcDirs, StringBuilder buf) throws IOException {
File[] files = javadocSrcDir.listFiles(pathname -> pathname.isDirectory() || pathname.getName().endsWith(".html"));
if (files != null) {
for (File file : files) {
if (file.isDirectory()) {
processDir(file, samplesSrcDirs, buf);
} else {
processFile(file, samplesSrcDirs, buf);
buf.setLength(0);
}
}
}
}
private static void processFile(File javadocSource, List<File> samplesSrcDirs, StringBuilder buf) throws IOException {
boolean rewrite = false;
try (
FileInputStream fileIn = new FileInputStream(javadocSource);
Reader fileReader = new InputStreamReader(fileIn, "utf-8");
BufferedReader reader = new BufferedReader(fileReader)
) {
for (String s = reader.readLine(); s != null; s = reader.readLine()) {
int start;
int end = -1;
String pattern = "${source:";
start = s.indexOf(pattern);
if (start >= 0) {
start += pattern.length();
end = s.indexOf('}', start);
}
if (start >= 0 && end >= 0) {
String path = s.substring(start, end).trim();
String section = null;
int splitIdx = path.indexOf('#');
if (splitIdx >= 0) {
section = path.substring(splitIdx + 1).trim();
path = path.substring(0, splitIdx).trim();
}
if (path.isEmpty() || section != null && section.isEmpty()) {
throw new IllegalStateException("Failed to parse 'source' directive: " + s);
}
String brush = resolveBrush(path);
if (brush == null) {
throw new IllegalArgumentException("Failed to resolve brush from path '" + path + "'.");
}
File code = null;
for (File dir : samplesSrcDirs) {
code = new File(dir, path);
if (code.isFile()) {
break;
} else {
code = null;
}
}
if (code == null) {
throw new FileNotFoundException("Failed to find sample source file: " + path);
}
buf.append("<div class=\"doc_source\">").append(NL);
buf.append("<pre><code class=\"").append(brush).append("\">").append(NL);
writeSourceCode(code, section, buf, javadocSource);
buf.append("</code></pre>").append(NL);
buf.append("</div>").append(NL);
rewrite = true;
} else {
buf.append(s).append(NL);
}
}
}
if (rewrite) {
write(buf, javadocSource);
}
}
private static void writeSourceCode(File src, String section, StringBuilder out, File requester) throws IOException {
List<String> sectionLines = new LinkedList<>();
Integer minOffset = null;
boolean sectionFound = section == null;
Pattern start = Pattern.compile("\\s*((//)|(<!--)).*Start:\\s*" + section + ".*");
Pattern end = Pattern.compile("\\s*((//)|(<!--)).*End:\\s*" + section + ".*");
for (String line : Files.readAllLines(Paths.get(src.getAbsolutePath()))) {
if (sectionFound) {
if (end.matcher(line).matches()) {
break;
}
String realLine = line.trim();
if (realLine.isEmpty()) {
sectionLines.add(realLine);
} else {
int offset = getWhitespacesOffset(line);
if (minOffset == null || offset < minOffset) {
minOffset = offset;
}
sectionLines.add(line);
}
} else {
if (start.matcher(line).matches()) {
sectionFound = true;
}
}
}
if (!sectionFound) {
throw new IllegalStateException("Couldn't find '" + section + "' section in file "
                + src.getAbsolutePath() + " (required for " + requester.getAbsolutePath() + ')');
}
Pattern lt = Pattern.compile("<");
Pattern gt = Pattern.compile(">");
Pattern publicStaticClass = Pattern.compile("public static class");
for (String line : sectionLines) {
String trimmed;
if (!line.isEmpty() && minOffset != null && minOffset > 0) {
trimmed = line.substring(minOffset);
} else {
trimmed = line;
}
// Replace 'public static class' with 'public class' since many code examples are implemented as inner classes.
trimmed = publicStaticClass.matcher(trimmed).replaceAll("public class");
            out.append(lt.matcher(gt.matcher(trimmed).replaceAll("&gt;")).replaceAll("&lt;")).append(NL);
}
}
private static void write(StringBuilder src, File target) throws IOException {
try (
FileOutputStream fileOut = new FileOutputStream(target);
OutputStreamWriter out = new OutputStreamWriter(fileOut, "utf-8")
) {
out.append(src);
out.flush();
}
}
@SuppressWarnings("UseOfSystemOutOrSystemErr")
private static void say(String msg) {
System.out.println(msg);
}
private static List<File> getSampleSources(String samplesSourcePath) {
String[] tokens = samplesSourcePath.split(";");
List<File> files = new ArrayList<>();
for (String token : tokens) {
File src = new File(token.trim());
if (src.isDirectory()) {
files.add(src);
}
}
return files;
}
private static String resolveBrush(String path) {
int dot = path.lastIndexOf('.');
if (dot >= 0 && dot < path.length() - 1) {
return path.substring(dot + 1);
}
return null;
}
private static int getWhitespacesOffset(String s) {
for (int i = 0; i < s.length(); i++) {
if (!Character.isSpaceChar(s.charAt(i))) {
return i;
}
}
return 0;
}
}
| Simplified reading/writing of javadoc files.
| hekate-dev-build/src/main/java/io/hekate/dev/CodeSamplesProcessorMain.java | Simplified reading/writing of javadoc files. |
|
Java | apache-2.0 | c402870bcdfcccef9ee7b8d0ba94dd38bc71702a | 0 | apache/directory-server,apache/directory-server,drankye/directory-server,drankye/directory-server,lucastheisen/apache-directory-server,darranl/directory-server,darranl/directory-server,lucastheisen/apache-directory-server | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.ldap;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.naming.ldap.Control;
import org.apache.directory.server.core.DirectoryService;
import org.apache.directory.server.core.partition.PartitionNexus;
import org.apache.directory.server.ldap.support.AbandonHandler;
import org.apache.directory.server.ldap.support.AddHandler;
import org.apache.directory.server.ldap.support.BindHandler;
import org.apache.directory.server.ldap.support.CompareHandler;
import org.apache.directory.server.ldap.support.DefaultAbandonHandler;
import org.apache.directory.server.ldap.support.DefaultAddHandler;
import org.apache.directory.server.ldap.support.DefaultBindHandler;
import org.apache.directory.server.ldap.support.DefaultCompareHandler;
import org.apache.directory.server.ldap.support.DefaultDeleteHandler;
import org.apache.directory.server.ldap.support.DefaultExtendedHandler;
import org.apache.directory.server.ldap.support.DefaultModifyDnHandler;
import org.apache.directory.server.ldap.support.DefaultModifyHandler;
import org.apache.directory.server.ldap.support.DefaultSearchHandler;
import org.apache.directory.server.ldap.support.DefaultUnbindHandler;
import org.apache.directory.server.ldap.support.DeleteHandler;
import org.apache.directory.server.ldap.support.ExtendedHandler;
import org.apache.directory.server.ldap.support.ModifyDnHandler;
import org.apache.directory.server.ldap.support.ModifyHandler;
import org.apache.directory.server.ldap.support.SearchHandler;
import org.apache.directory.server.ldap.support.UnbindHandler;
import org.apache.directory.server.ldap.support.ssl.LdapsInitializer;
import org.apache.directory.server.protocol.shared.ServiceConfiguration;
import org.apache.directory.server.protocol.shared.ServiceConfigurationException;
import org.apache.directory.server.schema.registries.AttributeTypeRegistry;
import org.apache.directory.shared.asn1.codec.Asn1CodecDecoder;
import org.apache.directory.shared.asn1.codec.Asn1CodecEncoder;
import org.apache.directory.shared.ldap.exception.LdapConfigurationException;
import org.apache.directory.shared.ldap.exception.LdapNamingException;
import org.apache.directory.shared.ldap.message.AbandonRequest;
import org.apache.directory.shared.ldap.message.AddRequest;
import org.apache.directory.shared.ldap.message.BindRequest;
import org.apache.directory.shared.ldap.message.CascadeControl;
import org.apache.directory.shared.ldap.message.CompareRequest;
import org.apache.directory.shared.ldap.message.DeleteRequest;
import org.apache.directory.shared.ldap.message.EntryChangeControl;
import org.apache.directory.shared.ldap.message.ExtendedRequest;
import org.apache.directory.shared.ldap.message.ExtendedRequestImpl;
import org.apache.directory.shared.ldap.message.ManageDsaITControl;
import org.apache.directory.shared.ldap.message.MessageDecoder;
import org.apache.directory.shared.ldap.message.MessageEncoder;
import org.apache.directory.shared.ldap.message.ModifyDnRequest;
import org.apache.directory.shared.ldap.message.ModifyRequest;
import org.apache.directory.shared.ldap.message.MutableControl;
import org.apache.directory.shared.ldap.message.PersistentSearchControl;
import org.apache.directory.shared.ldap.message.Request;
import org.apache.directory.shared.ldap.message.ResponseCarryingMessageException;
import org.apache.directory.shared.ldap.message.ResultCodeEnum;
import org.apache.directory.shared.ldap.message.ResultResponse;
import org.apache.directory.shared.ldap.message.ResultResponseRequest;
import org.apache.directory.shared.ldap.message.SearchRequest;
import org.apache.directory.shared.ldap.message.SubentriesControl;
import org.apache.directory.shared.ldap.message.UnbindRequest;
import org.apache.directory.shared.ldap.message.extended.NoticeOfDisconnect;
import org.apache.directory.shared.ldap.message.spi.BinaryAttributeDetector;
import org.apache.directory.shared.ldap.schema.AttributeType;
import org.apache.mina.common.DefaultIoFilterChainBuilder;
import org.apache.mina.common.IoFilterChain;
import org.apache.mina.common.IoFilterChainBuilder;
import org.apache.mina.common.IoHandler;
import org.apache.mina.common.IoSession;
import org.apache.mina.common.ThreadModel;
import org.apache.mina.common.WriteFuture;
import org.apache.mina.filter.SSLFilter;
import org.apache.mina.filter.codec.ProtocolCodecFactory;
import org.apache.mina.filter.codec.ProtocolCodecFilter;
import org.apache.mina.filter.codec.ProtocolDecoder;
import org.apache.mina.filter.codec.ProtocolEncoder;
import org.apache.mina.handler.demux.DemuxingIoHandler;
import org.apache.mina.transport.socket.nio.SocketAcceptor;
import org.apache.mina.transport.socket.nio.SocketAcceptorConfig;
import org.apache.mina.util.SessionLog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An LDAP protocol provider implementation which dynamically associates
* handlers.
*
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
* @version $Rev$
* @org.apache.xbean.XBean
*/
public class LdapServer extends ServiceConfiguration
{
@SuppressWarnings ( { "UnusedDeclaration" } )
private static final long serialVersionUID = 3757127143811666817L;
/** logger for this class */
private static final Logger LOG = LoggerFactory.getLogger( LdapServer.class.getName() );
/** The default maximum size limit. */
private static final int MAX_SIZE_LIMIT_DEFAULT = 100;
/** The default maximum time limit. */
private static final int MAX_TIME_LIMIT_DEFAULT = 10000;
/**
* The default service pid.
*/
private static final String SERVICE_PID_DEFAULT = "org.apache.directory.server.ldap";
/**
* The default service name.
*/
private static final String SERVICE_NAME_DEFAULT = "ApacheDS LDAP Service";
/** The default IP port. */
private static final int IP_PORT_DEFAULT = 389;
/** the constant service name of this ldap protocol provider **/
public static final String SERVICE_NAME = "ldap";
/** a set of supported controls */
private Set<String> supportedControls;
/** The maximum size limit. */
private int maxSizeLimit = MAX_SIZE_LIMIT_DEFAULT; // set to default value
/** The maximum time limit. */
private int maxTimeLimit = MAX_TIME_LIMIT_DEFAULT; // set to default value (milliseconds)
/** Whether LDAPS is enabled. */
private boolean enableLdaps;
/** Whether to allow anonymous access. */
private boolean allowAnonymousAccess = true; // allow by default
/** The path to the certificate file. */
private File ldapsCertificateFile = new File( "server-work" + File.separator + "certificates" + File.separator
+ "server.cert" );
/** The certificate password. */
private String ldapsCertificatePassword = "changeit";
/** The extended operation handlers. */
private final Collection<ExtendedOperationHandler> extendedOperationHandlers = new ArrayList<ExtendedOperationHandler>();
/** The supported authentication mechanisms. */
private Set<String> supportedMechanisms;
/** The name of this host, validated during SASL negotiation. */
private String saslHost = "ldap.example.com";
/** The service principal, used by GSSAPI. */
private String saslPrincipal = "ldap/[email protected]";
/** The quality of protection (QoP), used by DIGEST-MD5 and GSSAPI. */
private List<String> saslQop;
/** The list of realms serviced by this host. */
private List<String> saslRealms;
private AbandonHandler abandonHandler;
private AddHandler addHandler;
private BindHandler bindHandler;
private CompareHandler compareHandler;
private DeleteHandler deleteHandler;
private ExtendedHandler extendedHandler;
private ModifyHandler modifyHandler;
private ModifyDnHandler modifyDnHandler;
private SearchHandler searchHandler;
private UnbindHandler unbindHandler;
private SessionRegistry registry;
/** the underlying provider codec factory */
private ProtocolCodecFactory codecFactory;
/** the MINA protocol handler */
private final LdapProtocolHandler handler = new LdapProtocolHandler();
private final SocketAcceptor socketAcceptor;
private final DirectoryService directoryService;
/** tracks state of the server */
private boolean started;
/**
* Creates an LDAP protocol provider.
*
* @param socketAcceptor the mina socket acceptor wrapper
* @param directoryService the directory service core
*/
public LdapServer( SocketAcceptor socketAcceptor, DirectoryService directoryService )
{
this.socketAcceptor = socketAcceptor;
this.directoryService = directoryService;
this.codecFactory = new ProtocolCodecFactoryImpl( directoryService );
Hashtable<String,Object> copy = new Hashtable<String,Object>();
copy.put( Context.PROVIDER_URL, "" );
copy.put( Context.INITIAL_CONTEXT_FACTORY, "org.apache.directory.server.core.jndi.CoreContextFactory" );
copy.put( DirectoryService.JNDI_KEY, directoryService );
this.registry = new SessionRegistry( this, copy );
super.setIpPort( IP_PORT_DEFAULT );
super.setEnabled( true );
super.setServicePid( SERVICE_PID_DEFAULT );
super.setServiceName( SERVICE_NAME_DEFAULT );
supportedMechanisms = new HashSet<String>();
supportedMechanisms.add( "SIMPLE" );
supportedMechanisms.add( "CRAM-MD5" );
supportedMechanisms.add( "DIGEST-MD5" );
supportedMechanisms.add( "GSSAPI" );
saslQop = new ArrayList<String>();
saslQop.add( "auth" );
saslQop.add( "auth-int" );
saslQop.add( "auth-conf" );
saslRealms = new ArrayList<String>();
saslRealms.add( "example.com" );
this.supportedControls = new HashSet<String>();
this.supportedControls.add( PersistentSearchControl.CONTROL_OID );
this.supportedControls.add( EntryChangeControl.CONTROL_OID );
this.supportedControls.add( SubentriesControl.CONTROL_OID );
this.supportedControls.add( ManageDsaITControl.CONTROL_OID );
this.supportedControls.add( CascadeControl.CONTROL_OID );
setAbandonHandler( new DefaultAbandonHandler() );
setAddHandler( new DefaultAddHandler() );
setBindHandler( new DefaultBindHandler() );
setCompareHandler( new DefaultCompareHandler() );
setDeleteHandler( new DefaultDeleteHandler() );
setExtendedHandler( new DefaultExtendedHandler() );
setModifyHandler( new DefaultModifyHandler() );
setModifyDnHandler( new DefaultModifyDnHandler() );
setSearchHandler( new DefaultSearchHandler() );
setUnbindHandler( new DefaultUnbindHandler() );
}
/**
* @org.apache.xbean.InitMethod
* @throws IOException if we cannot bind to the specified port
* @throws NamingException if the LDAP server cannot be started
*/
public void start() throws NamingException, IOException
{
if ( ! isEnabled() )
{
return;
}
IoFilterChainBuilder chain;
if ( isEnableLdaps() )
{
char[] certPasswordChars = getLdapsCertificatePassword().toCharArray();
String storePath = getLdapsCertificateFile().getPath();
chain = LdapsInitializer.init( certPasswordChars, storePath );
}
else
{
chain = new DefaultIoFilterChainBuilder();
}
startLDAP0( getIpPort(), chain );
started = true;
}
/**
* @org.apache.xbean.DestroyMethod
*/
public void stop()
{
try
{
// we should unbind the service before we begin sending the notice
// of disconnect so new connections are not formed while we process
List<WriteFuture> writeFutures = new ArrayList<WriteFuture>();
// If the socket has already been unbound as with a successful
// GracefulShutdownRequest then this will complain that the service
// is not bound - this is ok because the GracefulShutdown has already
            // sent notices to the existing active sessions
List<IoSession> sessions;
try
{
sessions = new ArrayList<IoSession>(
socketAcceptor.getManagedSessions( new InetSocketAddress( getIpPort() ) ) );
}
catch ( IllegalArgumentException e )
{
LOG.warn( "Seems like the LDAP service (" + getIpPort() + ") has already been unbound." );
return;
}
socketAcceptor.unbind( new InetSocketAddress( getIpPort() ) );
if ( LOG.isInfoEnabled() )
{
LOG.info( "Unbind of an LDAP service (" + getIpPort() + ") is complete." );
LOG.info( "Sending notice of disconnect to existing clients sessions." );
}
// Send Notification of Disconnection messages to all connected clients.
if ( sessions != null )
{
for ( IoSession session:sessions )
{
writeFutures.add( session.write( NoticeOfDisconnect.UNAVAILABLE ) );
}
}
// And close the connections when the NoDs are sent.
Iterator<IoSession> sessionIt = sessions.iterator();
for ( WriteFuture future:writeFutures )
{
future.join( 1000 );
sessionIt.next().close();
}
}
catch ( Exception e )
{
            LOG.warn( "Failed to send NoD.", e );
}
}
private void startLDAP0( int port, IoFilterChainBuilder chainBuilder )
throws LdapNamingException, LdapConfigurationException
{
for ( ExtendedOperationHandler h : getExtendedOperationHandlers() )
{
addExtendedOperationHandler( h );
LOG.info( "Added Extended Request Handler: " + h.getOid() );
h.setLdapProvider( this );
PartitionNexus nexus = directoryService.getPartitionNexus();
nexus.registerSupportedExtensions( h.getExtensionOids() );
}
try
{
SocketAcceptorConfig acceptorCfg = new SocketAcceptorConfig();
// Disable the disconnection of the clients on unbind
acceptorCfg.setDisconnectOnUnbind( false );
acceptorCfg.setReuseAddress( true );
acceptorCfg.setFilterChainBuilder( chainBuilder );
acceptorCfg.setThreadModel( ThreadModel.MANUAL );
acceptorCfg.getSessionConfig().setTcpNoDelay( true );
socketAcceptor.bind( new InetSocketAddress( port ), getHandler(), acceptorCfg );
started = true;
if ( LOG.isInfoEnabled() )
{
LOG.info( "Successful bind of an LDAP Service (" + port + ") is complete." );
}
}
catch ( IOException e )
{
String msg = "Failed to bind an LDAP service (" + port + ") to the service registry.";
LdapConfigurationException lce = new LdapConfigurationException( msg );
lce.setRootCause( e );
LOG.error( msg, e );
throw lce;
}
}
public String getName()
{
return SERVICE_NAME;
}
public ProtocolCodecFactory getCodecFactory()
{
return codecFactory;
}
public IoHandler getHandler()
{
return handler;
}
// ------------------------------------------------------------------------
// Configuration Methods
// ------------------------------------------------------------------------
/**
     * Registers the specified {@link ExtendedOperationHandler} to this
* protocol provider to provide a specific LDAP extended operation.
*
* @param eoh an extended operation handler
*/
public void addExtendedOperationHandler( ExtendedOperationHandler eoh )
{
extendedHandler.addHandler( eoh );
}
/**
     * Deregisters an {@link ExtendedOperationHandler} with the specified <tt>oid</tt>
* from this protocol provider.
*
* @param oid the numeric identifier for the extended operation associated with
* the handler to remove
*/
public void removeExtendedOperationHandler( String oid )
{
extendedHandler.removeHandler( oid );
}
/**
* Returns an {@link ExtendedOperationHandler} with the specified <tt>oid</tt>
* which is registered to this protocol provider.
*
* @param oid the oid of the extended request of associated with the extended
* request handler
     * @return the extended operation handler
*/
public ExtendedOperationHandler getExtendedOperationHandler( String oid )
{
return extendedHandler.getHandler( oid );
}
/**
* Returns a {@link Map} of all registered OID-{@link ExtendedOperationHandler}
* pairs.
*
* @return map of all extended operation handlers
*/
public Map<String,ExtendedOperationHandler> getExtendedOperationHandlerMap()
{
return extendedHandler.getHandlerMap();
}
/**
* Returns <tt>true</tt> if LDAPS is enabled.
*
* @return True if LDAPS is enabled.
*/
public boolean isEnableLdaps()
{
return enableLdaps;
}
/**
* Sets if LDAPS is enabled or not.
*
* @param enableLdaps Whether LDAPS is enabled.
*/
public void setEnableLdaps( boolean enableLdaps )
{
this.enableLdaps = enableLdaps;
}
/**
* Returns the path of the X509 (or JKS) certificate file for LDAPS.
* The default value is <tt>"<WORKDIR>/certificates/server.cert"</tt>.
*
* @return The LDAPS certificate file.
*/
public File getLdapsCertificateFile()
{
return ldapsCertificateFile;
}
/**
* Sets the path of the SunX509 certificate file (either PKCS12 or JKS format)
* for LDAPS.
*
* @param ldapsCertificateFile The path to the SunX509 certificate.
*/
public void setLdapsCertificateFile( File ldapsCertificateFile )
{
if ( ldapsCertificateFile == null )
{
throw new ServiceConfigurationException( "LdapsCertificateFile cannot be null." );
}
this.ldapsCertificateFile = ldapsCertificateFile;
}
/**
     * Returns the password which is used to load the SunX509 certificate file
* (either PKCS12 or JKS format).
* The default value is <tt>"changeit"</tt>. This is the same value with what
* <a href="http://jakarta.apache.org/tomcat/">Apache Jakarta Tomcat</a> uses by
* default.
*
* @return The LDAPS certificate password.
*/
public String getLdapsCertificatePassword()
{
return ldapsCertificatePassword;
}
/**
* Sets the password which is used to load the LDAPS certificate file.
*
* @param ldapsCertificatePassword The certificate password.
*/
public void setLdapsCertificatePassword( String ldapsCertificatePassword )
{
if ( ldapsCertificatePassword == null )
{
throw new ServiceConfigurationException( "LdapsCertificatePassword cannot be null." );
}
this.ldapsCertificatePassword = ldapsCertificatePassword;
}
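    // Configuration sketch (hypothetical certificate path and password): LDAPS is enabled by calling the
    // setters above before start(), which then builds the SSL filter chain via LdapsInitializer.init():
    //   ldapServer.setEnableLdaps( true );
    //   ldapServer.setLdapsCertificateFile( new File( "conf/server.cert" ) );
    //   ldapServer.setLdapsCertificatePassword( "secret" );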
/**
* Returns <code>true</code> if anonymous access is allowed.
*
* @return True if anonymous access is allowed.
*/
public boolean isAllowAnonymousAccess()
{
return allowAnonymousAccess;
}
/**
* Sets whether to allow anonymous access or not.
*
* @param enableAnonymousAccess Set <code>true</code> to allow anonymous access.
*/
public void setAllowAnonymousAccess( boolean enableAnonymousAccess )
{
this.allowAnonymousAccess = enableAnonymousAccess;
}
/**
* Sets the maximum size limit in number of entries to return for search.
*
* @param maxSizeLimit the maximum number of entries to return for search
*/
public void setMaxSizeLimit( int maxSizeLimit )
{
this.maxSizeLimit = maxSizeLimit;
}
/**
* Returns the maximum size limit in number of entries to return for search.
*
* @return The maximum size limit.
*/
public int getMaxSizeLimit()
{
return maxSizeLimit;
}
/**
     * Sets the maximum time limit in milliseconds to conduct a search.
*
* @param maxTimeLimit the maximum length of time in milliseconds for search
*/
public void setMaxTimeLimit( int maxTimeLimit )
{
this.maxTimeLimit = maxTimeLimit;
}
/**
     * Returns the maximum time limit in milliseconds to conduct a search.
*
* @return The maximum time limit in milliseconds for search
*/
public int getMaxTimeLimit()
{
return maxTimeLimit;
}
/**
* Gets the {@link ExtendedOperationHandler}s.
*
* @return A collection of {@link ExtendedOperationHandler}s.
*/
public Collection<ExtendedOperationHandler> getExtendedOperationHandlers()
{
return new ArrayList<ExtendedOperationHandler>( extendedOperationHandlers );
}
/**
* Sets the {@link ExtendedOperationHandler}s.
*
* @org.apache.xbean.Property nestedType="org.apache.directory.server.ldap.ExtendedOperationHandler"
*
* @param handlers A collection of {@link ExtendedOperationHandler}s.
*/
public void setExtendedOperationHandlers( Collection<ExtendedOperationHandler> handlers )
{
this.extendedOperationHandlers.clear();
this.extendedOperationHandlers.addAll( handlers );
}
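    // Configuration sketch (MyExtendedOperationHandler is hypothetical): handlers supplied here are picked
    // up by start(), which registers each one and publishes its extension OIDs through the partition nexus:
    //   ldapServer.setExtendedOperationHandlers(
    //       Collections.<ExtendedOperationHandler>singletonList( new MyExtendedOperationHandler() ) );
    //   ldapServer.start();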
/**
* Returns the FQDN of this SASL host, validated during SASL negotiation.
*
* @return The FQDN of this SASL host, validated during SASL negotiation.
*/
public String getSaslHost()
{
return saslHost;
}
/**
* Sets the FQDN of this SASL host, validated during SASL negotiation.
*
* @param saslHost The FQDN of this SASL host, validated during SASL negotiation.
*/
public void setSaslHost( String saslHost )
{
this.saslHost = saslHost;
}
/**
* Returns the Kerberos principal name for this LDAP service, used by GSSAPI.
*
* @return The Kerberos principal name for this LDAP service, used by GSSAPI.
*/
public String getSaslPrincipal()
{
return saslPrincipal;
}
/**
* Sets the Kerberos principal name for this LDAP service, used by GSSAPI.
*
* @param saslPrincipal The Kerberos principal name for this LDAP service, used by GSSAPI.
*/
public void setSaslPrincipal( String saslPrincipal )
{
this.saslPrincipal = saslPrincipal;
}
/**
* Returns the desired quality-of-protection, used by DIGEST-MD5 and GSSAPI.
*
* @return The desired quality-of-protection, used by DIGEST-MD5 and GSSAPI.
*/
public List<String> getSaslQop()
{
return saslQop;
}
/**
* Sets the desired quality-of-protection, used by DIGEST-MD5 and GSSAPI.
*
* @org.apache.xbean.Property nestedType="java.lang.String"
*
* @param saslQop The desired quality-of-protection, used by DIGEST-MD5 and GSSAPI.
*/
public void setSaslQop( List<String> saslQop )
{
this.saslQop = saslQop;
}
/**
* Returns the realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI.
*
* @return The realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI.
*/
public List getSaslRealms()
{
return saslRealms;
}
/**
* Sets the realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI.
*
* @org.apache.xbean.Property nestedType="java.lang.String"
*
* @param saslRealms The realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI.
*/
public void setSaslRealms( List<String> saslRealms )
{
this.saslRealms = saslRealms;
}
/**
* Returns the list of supported authentication mechanisms.
*
* @return The list of supported authentication mechanisms.
*/
public Set<String> getSupportedMechanisms()
{
return supportedMechanisms;
}
/**
* Sets the list of supported authentication mechanisms.
*
* @org.apache.xbean.Property propertyEditor="ListEditor" nestedType="java.lang.String"
*
* @param supportedMechanisms The list of supported authentication mechanisms.
*/
public void setSupportedMechanisms( Set<String> supportedMechanisms )
{
this.supportedMechanisms = supportedMechanisms;
}
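    // Configuration sketch (hypothetical values): the SASL-related setters above override the defaults
    // installed by the constructor, e.g.
    //   ldapServer.setSaslHost( "ldap.corp.example" );
    //   ldapServer.setSaslPrincipal( "ldap/[email protected]" );
    //   ldapServer.setSaslRealms( Collections.singletonList( "CORP.EXAMPLE" ) );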
public DirectoryService getDirectoryService()
{
return directoryService;
}
public Set<String> getSupportedControls()
{
return supportedControls;
}
public void setSupportedControls( Set<String> supportedControls )
{
this.supportedControls = supportedControls;
}
public AbandonHandler getAbandonHandler()
{
return abandonHandler;
}
public void setAbandonHandler( AbandonHandler abandonHandler )
{
this.handler.removeMessageHandler( AbandonRequest.class );
this.abandonHandler = abandonHandler;
this.abandonHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( AbandonRequest.class, this.abandonHandler );
}
public AddHandler getAddHandler()
{
return addHandler;
}
public void setAddHandler( AddHandler addHandler )
{
this.handler.removeMessageHandler( AddRequest.class );
this.addHandler = addHandler;
this.addHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( AddRequest.class, this.addHandler );
}
public BindHandler getBindHandler()
{
return bindHandler;
}
public void setBindHandler( BindHandler bindHandler )
{
this.handler.removeMessageHandler( BindRequest.class );
this.bindHandler = bindHandler;
this.bindHandler.setProtocolProvider( this );
this.bindHandler.setDirectoryService( directoryService );
//noinspection unchecked
this.handler.addMessageHandler( BindRequest.class, this.bindHandler );
}
public CompareHandler getCompareHandler()
{
return compareHandler;
}
public void setCompareHandler( CompareHandler compareHandler )
{
this.handler.removeMessageHandler( CompareRequest.class );
this.compareHandler = compareHandler;
this.compareHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( CompareRequest.class, this.compareHandler );
}
public DeleteHandler getDeleteHandler()
{
return deleteHandler;
}
public void setDeleteHandler( DeleteHandler deleteHandler )
{
this.handler.removeMessageHandler( DeleteRequest.class );
this.deleteHandler = deleteHandler;
this.deleteHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( DeleteRequest.class, this.deleteHandler );
}
public ExtendedHandler getExtendedHandler()
{
return extendedHandler;
}
public void setExtendedHandler( ExtendedHandler extendedHandler )
{
this.handler.removeMessageHandler( ExtendedRequest.class );
this.extendedHandler = extendedHandler;
this.extendedHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( ExtendedRequest.class, this.extendedHandler );
}
public ModifyHandler getModifyHandler()
{
return modifyHandler;
}
public void setModifyHandler( ModifyHandler modifyHandler )
{
this.handler.removeMessageHandler( ModifyRequest.class );
this.modifyHandler = modifyHandler;
this.modifyHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( ModifyRequest.class, this.modifyHandler );
}
public ModifyDnHandler getModifyDnHandler()
{
return modifyDnHandler;
}
public void setModifyDnHandler( ModifyDnHandler modifyDnHandler )
{
this.handler.removeMessageHandler( ModifyDnRequest.class );
this.modifyDnHandler = modifyDnHandler;
this.modifyDnHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( ModifyDnRequest.class, this.modifyDnHandler );
}
public SearchHandler getSearchHandler()
{
return searchHandler;
}
public void setSearchHandler( SearchHandler searchHandler )
{
this.handler.removeMessageHandler( SearchRequest.class );
this.searchHandler = searchHandler;
this.searchHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( SearchRequest.class, this.searchHandler );
}
public UnbindHandler getUnbindHandler()
{
return unbindHandler;
}
public void setUnbindHandler( UnbindHandler unbindHandler )
{
this.handler.removeMessageHandler( UnbindRequest.class );
this.unbindHandler = unbindHandler;
this.unbindHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( UnbindRequest.class, this.unbindHandler );
}
public SessionRegistry getRegistry()
{
return registry;
}
public boolean isStarted()
{
return started;
}
public void setStarted( boolean started )
{
this.started = started;
}
/**
* A snickers based BER Decoder factory.
*/
private static final class ProtocolCodecFactoryImpl implements ProtocolCodecFactory
{
final DirectoryService directoryService;
public ProtocolCodecFactoryImpl( DirectoryService directoryService )
{
this.directoryService = directoryService;
}
public ProtocolEncoder getEncoder()
{
return new Asn1CodecEncoder( new MessageEncoder() );
}
public ProtocolDecoder getDecoder()
{
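// Decode incoming messages, treating an attribute as binary whenever its schema syntax
// is not human readable; if the attribute type cannot be resolved it is treated as text.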
return new Asn1CodecDecoder( new MessageDecoder( new BinaryAttributeDetector()
{
public boolean isBinary( String id )
{
AttributeTypeRegistry attrRegistry = directoryService.getRegistries().getAttributeTypeRegistry();
try
{
AttributeType type = attrRegistry.lookup( id );
return ! type.getSyntax().isHumanReadable();
}
catch ( NamingException e )
{
return false;
}
}
}) );
}
}
private class LdapProtocolHandler extends DemuxingIoHandler
{
public void sessionCreated( IoSession session ) throws Exception
{
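// Expose the owning LdapServer instance to message handlers through a session attribute,
// then install the LDAP codec on this session's filter chain.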
session.setAttribute( LdapServer.class.toString(), LdapServer.this );
IoFilterChain filters = session.getFilterChain();
filters.addLast( "codec", new ProtocolCodecFilter( codecFactory ) );
}
public void sessionClosed( IoSession session )
{
registry.remove( session );
}
public void messageReceived( IoSession session, Object message ) throws Exception
{
// Translate SSLFilter messages into the LDAP extended request
// defined in RFC 2830, 'Lightweight Directory Access Protocol (v3):
// Extension for Transport Layer Security'.
//
// The RFC specifies that the payload should be empty, but we use
// it to signal TLS state changes. This hack should be
// acceptable from a security standpoint because the StartTLS
// handler should react only to the SESSION_UNSECURED message
// and degrade the authentication level to 'anonymous' as specified
// in the RFC, so this poses no threat.
if ( message == SSLFilter.SESSION_SECURED )
{
ExtendedRequest req = new ExtendedRequestImpl( 0 );
req.setOid( "1.3.6.1.4.1.1466.20037" );
req.setPayload( "SECURED".getBytes( "ISO-8859-1" ) );
message = req;
}
else if ( message == SSLFilter.SESSION_UNSECURED )
{
ExtendedRequest req = new ExtendedRequestImpl( 0 );
req.setOid( "1.3.6.1.4.1.1466.20037" );
req.setPayload( "UNSECURED".getBytes( "ISO-8859-1" ) );
message = req;
}
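// Reject any request that carries a critical control this server does not support,
// answering with the unavailableCriticalExtension result code.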
if ( ( ( Request ) message ).getControls().size() > 0 && message instanceof ResultResponseRequest )
{
ResultResponseRequest req = ( ResultResponseRequest ) message;
for ( Control control1 : req.getControls().values() )
{
MutableControl control = ( MutableControl ) control1;
if ( control.isCritical() && !supportedControls.contains( control.getID() ) )
{
ResultResponse resp = req.getResultResponse();
resp.getLdapResult().setErrorMessage( "Unsupported critical control: " + control.getID() );
resp.getLdapResult().setResultCode( ResultCodeEnum.UNAVAILABLE_CRITICAL_EXTENSION );
session.write( resp );
return;
}
}
}
super.messageReceived( session, message );
}
public void exceptionCaught( IoSession session, Throwable cause )
{
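// If the failure already carries a prepared LDAP response, just send it; otherwise log,
// send a notice of disconnect to the client and drop the session.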
if ( cause.getCause() instanceof ResponseCarryingMessageException )
{
ResponseCarryingMessageException rcme = ( ResponseCarryingMessageException ) cause.getCause();
session.write( rcme.getResponse() );
return;
}
SessionLog.warn( session,
"Unexpected exception forcing session to close: sending disconnect notice to client.", cause );
session.write( NoticeOfDisconnect.PROTOCOLERROR );
registry.remove( session );
session.close();
}
}
}
| protocol-ldap/src/main/java/org/apache/directory/server/ldap/LdapServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.ldap;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.naming.ldap.Control;
import org.apache.directory.server.core.DirectoryService;
import org.apache.directory.server.core.partition.PartitionNexus;
import org.apache.directory.server.ldap.support.AbandonHandler;
import org.apache.directory.server.ldap.support.AddHandler;
import org.apache.directory.server.ldap.support.BindHandler;
import org.apache.directory.server.ldap.support.CompareHandler;
import org.apache.directory.server.ldap.support.DefaultAbandonHandler;
import org.apache.directory.server.ldap.support.DefaultAddHandler;
import org.apache.directory.server.ldap.support.DefaultBindHandler;
import org.apache.directory.server.ldap.support.DefaultCompareHandler;
import org.apache.directory.server.ldap.support.DefaultDeleteHandler;
import org.apache.directory.server.ldap.support.DefaultExtendedHandler;
import org.apache.directory.server.ldap.support.DefaultModifyDnHandler;
import org.apache.directory.server.ldap.support.DefaultModifyHandler;
import org.apache.directory.server.ldap.support.DefaultSearchHandler;
import org.apache.directory.server.ldap.support.DefaultUnbindHandler;
import org.apache.directory.server.ldap.support.DeleteHandler;
import org.apache.directory.server.ldap.support.ExtendedHandler;
import org.apache.directory.server.ldap.support.ModifyDnHandler;
import org.apache.directory.server.ldap.support.ModifyHandler;
import org.apache.directory.server.ldap.support.SearchHandler;
import org.apache.directory.server.ldap.support.UnbindHandler;
import org.apache.directory.server.ldap.support.ssl.LdapsInitializer;
import org.apache.directory.server.protocol.shared.ServiceConfiguration;
import org.apache.directory.server.protocol.shared.ServiceConfigurationException;
import org.apache.directory.server.schema.registries.AttributeTypeRegistry;
import org.apache.directory.shared.asn1.codec.Asn1CodecDecoder;
import org.apache.directory.shared.asn1.codec.Asn1CodecEncoder;
import org.apache.directory.shared.ldap.exception.LdapConfigurationException;
import org.apache.directory.shared.ldap.exception.LdapNamingException;
import org.apache.directory.shared.ldap.message.AbandonRequest;
import org.apache.directory.shared.ldap.message.AddRequest;
import org.apache.directory.shared.ldap.message.BindRequest;
import org.apache.directory.shared.ldap.message.CascadeControl;
import org.apache.directory.shared.ldap.message.CompareRequest;
import org.apache.directory.shared.ldap.message.DeleteRequest;
import org.apache.directory.shared.ldap.message.EntryChangeControl;
import org.apache.directory.shared.ldap.message.ExtendedRequest;
import org.apache.directory.shared.ldap.message.ExtendedRequestImpl;
import org.apache.directory.shared.ldap.message.ManageDsaITControl;
import org.apache.directory.shared.ldap.message.MessageDecoder;
import org.apache.directory.shared.ldap.message.MessageEncoder;
import org.apache.directory.shared.ldap.message.ModifyDnRequest;
import org.apache.directory.shared.ldap.message.ModifyRequest;
import org.apache.directory.shared.ldap.message.MutableControl;
import org.apache.directory.shared.ldap.message.PersistentSearchControl;
import org.apache.directory.shared.ldap.message.Request;
import org.apache.directory.shared.ldap.message.ResponseCarryingMessageException;
import org.apache.directory.shared.ldap.message.ResultCodeEnum;
import org.apache.directory.shared.ldap.message.ResultResponse;
import org.apache.directory.shared.ldap.message.ResultResponseRequest;
import org.apache.directory.shared.ldap.message.SearchRequest;
import org.apache.directory.shared.ldap.message.SubentriesControl;
import org.apache.directory.shared.ldap.message.UnbindRequest;
import org.apache.directory.shared.ldap.message.extended.NoticeOfDisconnect;
import org.apache.directory.shared.ldap.message.spi.BinaryAttributeDetector;
import org.apache.directory.shared.ldap.schema.AttributeType;
import org.apache.mina.common.DefaultIoFilterChainBuilder;
import org.apache.mina.common.IoFilterChain;
import org.apache.mina.common.IoFilterChainBuilder;
import org.apache.mina.common.IoHandler;
import org.apache.mina.common.IoSession;
import org.apache.mina.common.ThreadModel;
import org.apache.mina.common.WriteFuture;
import org.apache.mina.filter.SSLFilter;
import org.apache.mina.filter.codec.ProtocolCodecFactory;
import org.apache.mina.filter.codec.ProtocolCodecFilter;
import org.apache.mina.filter.codec.ProtocolDecoder;
import org.apache.mina.filter.codec.ProtocolEncoder;
import org.apache.mina.handler.demux.DemuxingIoHandler;
import org.apache.mina.transport.socket.nio.SocketAcceptor;
import org.apache.mina.transport.socket.nio.SocketAcceptorConfig;
import org.apache.mina.util.SessionLog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An LDAP protocol provider implementation which dynamically associates
* handlers.
*
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
* @version $Rev$
* @org.apache.xbean.XBean
*/
public class LdapServer extends ServiceConfiguration
{
@SuppressWarnings ( { "UnusedDeclaration" } )
private static final long serialVersionUID = 3757127143811666817L;
/** logger for this class */
private static final Logger LOG = LoggerFactory.getLogger( LdapServer.class.getName() );
/** The default maximum size limit. */
private static final int MAX_SIZE_LIMIT_DEFAULT = 100;
/** The default maximum time limit. */
private static final int MAX_TIME_LIMIT_DEFAULT = 10000;
/**
* The default service pid.
*/
private static final String SERVICE_PID_DEFAULT = "org.apache.directory.server.ldap";
/**
* The default service name.
*/
private static final String SERVICE_NAME_DEFAULT = "ApacheDS LDAP Service";
/** The default IP port. */
private static final int IP_PORT_DEFAULT = 389;
/** the constant service name of this ldap protocol provider **/
public static final String SERVICE_NAME = "ldap";
/** a set of supported controls */
private Set<String> supportedControls;
/** The maximum size limit. */
private int maxSizeLimit = MAX_SIZE_LIMIT_DEFAULT; // set to default value
/** The maximum time limit. */
private int maxTimeLimit = MAX_TIME_LIMIT_DEFAULT; // set to default value (milliseconds)
/** Whether LDAPS is enabled. */
private boolean enableLdaps;
/** Whether to allow anonymous access. */
private boolean allowAnonymousAccess = true; // allow by default
/** The path to the certificate file. */
private File ldapsCertificateFile = new File( "server-work" + File.separator + "certificates" + File.separator
+ "server.cert" );
/** The certificate password. */
private String ldapsCertificatePassword = "changeit";
/** The extended operation handlers. */
private final Collection<ExtendedOperationHandler> extendedOperationHandlers = new ArrayList<ExtendedOperationHandler>();
/** The supported authentication mechanisms. */
private Set<String> supportedMechanisms;
/** The name of this host, validated during SASL negotiation. */
private String saslHost = "ldap.example.com";
/** The service principal, used by GSSAPI. */
private String saslPrincipal = "ldap/[email protected]";
/** The quality of protection (QoP), used by DIGEST-MD5 and GSSAPI. */
private List<String> saslQop;
/** The list of realms serviced by this host. */
private List<String> saslRealms;
private AbandonHandler abandonHandler;
private AddHandler addHandler;
private BindHandler bindHandler;
private CompareHandler compareHandler;
private DeleteHandler deleteHandler;
private ExtendedHandler extendedHandler;
private ModifyHandler modifyHandler;
private ModifyDnHandler modifyDnHandler;
private SearchHandler searchHandler;
private UnbindHandler unbindHandler;
private SessionRegistry registry;
/** the underlying provider codec factory */
private ProtocolCodecFactory codecFactory;
/** the MINA protocol handler */
private final LdapProtocolHandler handler = new LdapProtocolHandler();
private final SocketAcceptor socketAcceptor;
private final DirectoryService directoryService;
/** tracks state of the server */
private boolean started;
/**
* Creates an LDAP protocol provider.
*
* @param socketAcceptor the mina socket acceptor wrapper
* @param directoryService
*/
public LdapServer( SocketAcceptor socketAcceptor, DirectoryService directoryService )
{
this.socketAcceptor = socketAcceptor;
this.directoryService = directoryService;
this.codecFactory = new ProtocolCodecFactoryImpl( directoryService );
Hashtable<String,Object> copy = new Hashtable<String,Object>();
copy.put( Context.PROVIDER_URL, "" );
copy.put( Context.INITIAL_CONTEXT_FACTORY, "org.apache.directory.server.core.jndi.CoreContextFactory" );
copy.put( DirectoryService.JNDI_KEY, directoryService );
this.registry = new SessionRegistry( this, copy );
super.setIpPort( IP_PORT_DEFAULT );
super.setEnabled( true );
super.setServicePid( SERVICE_PID_DEFAULT );
super.setServiceName( SERVICE_NAME_DEFAULT );
supportedMechanisms = new HashSet<String>();
supportedMechanisms.add( "SIMPLE" );
supportedMechanisms.add( "CRAM-MD5" );
supportedMechanisms.add( "DIGEST-MD5" );
supportedMechanisms.add( "GSSAPI" );
saslQop = new ArrayList<String>();
saslQop.add( "auth" );
saslQop.add( "auth-int" );
saslQop.add( "auth-conf" );
saslRealms = new ArrayList<String>();
saslRealms.add( "example.com" );
this.supportedControls = new HashSet<String>();
this.supportedControls.add( PersistentSearchControl.CONTROL_OID );
this.supportedControls.add( EntryChangeControl.CONTROL_OID );
this.supportedControls.add( SubentriesControl.CONTROL_OID );
this.supportedControls.add( ManageDsaITControl.CONTROL_OID );
this.supportedControls.add( CascadeControl.CONTROL_OID );
setAbandonHandler( new DefaultAbandonHandler() );
setAddHandler( new DefaultAddHandler() );
setBindHandler( new DefaultBindHandler() );
setCompareHandler( new DefaultCompareHandler() );
setDeleteHandler( new DefaultDeleteHandler() );
setExtendedHandler( new DefaultExtendedHandler() );
setModifyHandler( new DefaultModifyHandler() );
setModifyDnHandler( new DefaultModifyDnHandler() );
setSearchHandler( new DefaultSearchHandler() );
setUnbindHandler( new DefaultUnbindHandler() );
}
/**
* @org.apache.xbean.InitMethod
* @throws IOException if we cannot bind to the specified port
* @throws NamingException if the LDAP server cannot be started
*/
public void start() throws NamingException, IOException
{
if ( ! isEnabled() )
{
return;
}
IoFilterChainBuilder chain;
if ( isEnableLdaps() )
{
char[] certPasswordChars = getLdapsCertificatePassword().toCharArray();
String storePath = getLdapsCertificateFile().getPath();
chain = LdapsInitializer.init( certPasswordChars, storePath );
}
else
{
chain = new DefaultIoFilterChainBuilder();
}
startLDAP0( getIpPort(), chain );
started = true;
}
/**
* @org.apache.xbean.DestroyMethod
*/
public void stop()
{
try
{
// we should unbind the service before we begin sending the notice
// of disconnect so new connections are not formed while we process
List<WriteFuture> writeFutures = new ArrayList<WriteFuture>();
// If the socket has already been unbound as with a successful
// GracefulShutdownRequest then this will complain that the service
// is not bound - this is ok because the GracefulShutdown has already
// sent notices to the existing active sessions
List<IoSession> sessions;
try
{
sessions = new ArrayList<IoSession>(
socketAcceptor.getManagedSessions( new InetSocketAddress( getIpPort() ) ) );
}
catch ( IllegalArgumentException e )
{
LOG.warn( "Seems like the LDAP service (" + getIpPort() + ") has already been unbound." );
return;
}
socketAcceptor.unbind( new InetSocketAddress( getIpPort() ) );
if ( LOG.isInfoEnabled() )
{
LOG.info( "Unbind of an LDAP service (" + getIpPort() + ") is complete." );
LOG.info( "Sending notice of disconnect to existing clients sessions." );
}
// Send Notification of Disconnection messages to all connected clients.
if ( sessions != null )
{
for ( IoSession session:sessions )
{
writeFutures.add( session.write( NoticeOfDisconnect.UNAVAILABLE ) );
}
}
// And close the connections when the NoDs are sent.
Iterator<IoSession> sessionIt = sessions.iterator();
for ( WriteFuture future:writeFutures )
{
future.join( 1000 );
sessionIt.next().close();
}
}
catch ( Exception e )
{
LOG.warn( "Failed to send NoD.", e );
}
}
private void startLDAP0( int port, IoFilterChainBuilder chainBuilder )
throws LdapNamingException, LdapConfigurationException
{
for ( ExtendedOperationHandler h : getExtendedOperationHandlers() )
{
addExtendedOperationHandler( h );
LOG.info( "Added Extended Request Handler: " + h.getOid() );
h.setLdapProvider( this );
PartitionNexus nexus = directoryService.getPartitionNexus();
nexus.registerSupportedExtensions( h.getExtensionOids() );
}
try
{
SocketAcceptorConfig acceptorCfg = new SocketAcceptorConfig();
// Disable the disconnection of the clients on unbind
acceptorCfg.setDisconnectOnUnbind( false );
acceptorCfg.setReuseAddress( true );
acceptorCfg.setFilterChainBuilder( chainBuilder );
acceptorCfg.setThreadModel( ThreadModel.MANUAL );
acceptorCfg.getSessionConfig().setTcpNoDelay( true );
socketAcceptor.bind( new InetSocketAddress( port ), getHandler(), acceptorCfg );
started = true;
if ( LOG.isInfoEnabled() )
{
LOG.info( "Successful bind of an LDAP Service (" + port + ") is complete." );
}
}
catch ( IOException e )
{
String msg = "Failed to bind an LDAP service (" + port + ") to the service registry.";
LdapConfigurationException lce = new LdapConfigurationException( msg );
lce.setRootCause( e );
LOG.error( msg, e );
throw lce;
}
}
public String getName()
{
return SERVICE_NAME;
}
public ProtocolCodecFactory getCodecFactory()
{
return codecFactory;
}
public IoHandler getHandler()
{
return handler;
}
// ------------------------------------------------------------------------
// Configuration Methods
// ------------------------------------------------------------------------
/**
* Registers the specified {@link ExtendedOperationHandler} with this
* protocol provider to provide a specific LDAP extended operation.
*
* @param eoh an extended operation handler
*/
public void addExtendedOperationHandler( ExtendedOperationHandler eoh )
{
extendedHandler.addHandler( eoh );
}
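// A minimal usage sketch (the handler instance is hypothetical; the constructor,
// this method and start() are defined in this class):
//   LdapServer server = new LdapServer( socketAcceptor, directoryService );
//   server.addExtendedOperationHandler( myExtendedOperationHandler );
//   server.start();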
/**
* Deregisters an {@link ExtendedOperationHandler} with the specified <tt>oid</tt>
* from this protocol provider.
*
* @param oid the numeric identifier for the extended operation associated with
* the handler to remove
*/
public void removeExtendedOperationHandler( String oid )
{
extendedHandler.removeHandler( oid );
}
/**
* Returns an {@link ExtendedOperationHandler} with the specified <tt>oid</tt>
* which is registered to this protocol provider.
*
* @param oid the oid of the extended request of associated with the extended
* request handler
* @return the extended operation handler
*/
public ExtendedOperationHandler getExtendedOperationHandler( String oid )
{
return extendedHandler.getHandler( oid );
}
/**
* Returns a {@link Map} of all registered OID-{@link ExtendedOperationHandler}
* pairs.
*
* @return map of all extended operation handlers
*/
public Map<String,ExtendedOperationHandler> getExtendedOperationHandlerMap()
{
return extendedHandler.getHandlerMap();
}
/**
* Returns <tt>true</tt> if LDAPS is enabled.
*
* @return True if LDAPS is enabled.
*/
public boolean isEnableLdaps()
{
return enableLdaps;
}
/**
* Sets if LDAPS is enabled or not.
*
* @param enableLdaps Whether LDAPS is enabled.
*/
public void setEnableLdaps( boolean enableLdaps )
{
this.enableLdaps = enableLdaps;
}
/**
* Returns the path of the X509 (or JKS) certificate file for LDAPS.
* The default value is <tt>"<WORKDIR>/certificates/server.cert"</tt>.
*
* @return The LDAPS certificate file.
*/
public File getLdapsCertificateFile()
{
return ldapsCertificateFile;
}
/**
* Sets the path of the SunX509 certificate file (either PKCS12 or JKS format)
* for LDAPS.
*
* @param ldapsCertificateFile The path to the SunX509 certificate.
*/
public void setLdapsCertificateFile( File ldapsCertificateFile )
{
if ( ldapsCertificateFile == null )
{
throw new ServiceConfigurationException( "LdapsCertificateFile cannot be null." );
}
this.ldapsCertificateFile = ldapsCertificateFile;
}
/**
* Returns the password which is used to load the SunX509 certificate file
* (either PKCS12 or JKS format).
* The default value is <tt>"changeit"</tt>, the same default used by
* <a href="http://jakarta.apache.org/tomcat/">Apache Jakarta Tomcat</a>.
*
* @return The LDAPS certificate password.
*/
public String getLdapsCertificatePassword()
{
return ldapsCertificatePassword;
}
/**
* Sets the password which is used to load the LDAPS certificate file.
*
* @param ldapsCertificatePassword The certificate password.
*/
public void setLdapsCertificatePassword( String ldapsCertificatePassword )
{
if ( ldapsCertificatePassword == null )
{
throw new ServiceConfigurationException( "LdapsCertificatePassword cannot be null." );
}
this.ldapsCertificatePassword = ldapsCertificatePassword;
}
/**
* Returns <code>true</code> if anonymous access is allowed.
*
* @return True if anonymous access is allowed.
*/
public boolean isAllowAnonymousAccess()
{
return allowAnonymousAccess;
}
/**
* Sets whether to allow anonymous access or not.
*
* @param enableAnonymousAccess Set <code>true</code> to allow anonymous access.
*/
public void setAllowAnonymousAccess( boolean enableAnonymousAccess )
{
this.allowAnonymousAccess = enableAnonymousAccess;
}
/**
* Sets the maximum size limit in number of entries to return for search.
*
* @param maxSizeLimit the maximum number of entries to return for search
*/
public void setMaxSizeLimit( int maxSizeLimit )
{
this.maxSizeLimit = maxSizeLimit;
}
/**
* Returns the maximum size limit in number of entries to return for search.
*
* @return The maximum size limit.
*/
public int getMaxSizeLimit()
{
return maxSizeLimit;
}
/**
* Sets the maximum time limit in milliseconds to conduct a search.
*
* @param maxTimeLimit the maximum length of time in milliseconds for search
*/
public void setMaxTimeLimit( int maxTimeLimit )
{
this.maxTimeLimit = maxTimeLimit;
}
/**
* Returns the maximum time limit in milliseconds to conduct a search.
*
* @return The maximum time limit in milliseconds for search
*/
public int getMaxTimeLimit()
{
return maxTimeLimit;
}
/**
* Gets the {@link ExtendedOperationHandler}s.
*
* @return A collection of {@link ExtendedOperationHandler}s.
*/
public Collection<ExtendedOperationHandler> getExtendedOperationHandlers()
{
return new ArrayList<ExtendedOperationHandler>( extendedOperationHandlers );
}
/**
* Sets the {@link ExtendedOperationHandler}s.
*
* @org.apache.xbean.Property nestedType="org.apache.directory.server.ldap.ExtendedOperationHandler"
*
* @param handlers A collection of {@link ExtendedOperationHandler}s.
*/
public void setExtendedOperationHandlers( Collection<ExtendedOperationHandler> handlers )
{
this.extendedOperationHandlers.clear();
this.extendedOperationHandlers.addAll( handlers );
}
/**
* Returns the FQDN of this SASL host, validated during SASL negotiation.
*
* @return The FQDN of this SASL host, validated during SASL negotiation.
*/
public String getSaslHost()
{
return saslHost;
}
/**
* Sets the FQDN of this SASL host, validated during SASL negotiation.
*
* @param saslHost The FQDN of this SASL host, validated during SASL negotiation.
*/
public void setSaslHost( String saslHost )
{
this.saslHost = saslHost;
}
/**
* Returns the Kerberos principal name for this LDAP service, used by GSSAPI.
*
* @return The Kerberos principal name for this LDAP service, used by GSSAPI.
*/
public String getSaslPrincipal()
{
return saslPrincipal;
}
/**
* Sets the Kerberos principal name for this LDAP service, used by GSSAPI.
*
* @param saslPrincipal The Kerberos principal name for this LDAP service, used by GSSAPI.
*/
public void setSaslPrincipal( String saslPrincipal )
{
this.saslPrincipal = saslPrincipal;
}
/**
* Returns the desired quality-of-protection, used by DIGEST-MD5 and GSSAPI.
*
* @return The desired quality-of-protection, used by DIGEST-MD5 and GSSAPI.
*/
public List<String> getSaslQop()
{
return saslQop;
}
/**
* Sets the desired quality-of-protection, used by DIGEST-MD5 and GSSAPI.
*
* @org.apache.xbean.Property nestedType="java.lang.String"
*
* @param saslQop The desired quality-of-protection, used by DIGEST-MD5 and GSSAPI.
*/
public void setSaslQop( List<String> saslQop )
{
this.saslQop = saslQop;
}
/**
* Returns the realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI.
*
* @return The realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI.
*/
public List<String> getSaslRealms()
{
return saslRealms;
}
/**
* Sets the realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI.
*
* @org.apache.xbean.Property nestedType="java.lang.String"
*
* @param saslRealms The realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI.
*/
public void setSaslRealms( List<String> saslRealms )
{
this.saslRealms = saslRealms;
}
/**
* Returns the list of supported authentication mechanisms.
*
* @return The list of supported authentication mechanisms.
*/
public Set<String> getSupportedMechanisms()
{
return supportedMechanisms;
}
/**
* Sets the list of supported authentication mechanisms.
*
* @org.apache.xbean.Property propertyEditor="ListEditor" nestedType="java.lang.String"
*
* @param supportedMechanisms The list of supported authentication mechanisms.
*/
public void setSupportedMechanisms( Set<String> supportedMechanisms )
{
this.supportedMechanisms = supportedMechanisms;
}
public DirectoryService getDirectoryService()
{
return directoryService;
}
public Set<String> getSupportedControls()
{
return supportedControls;
}
public void setSupportedControls( Set<String> supportedControls )
{
this.supportedControls = supportedControls;
}
public AbandonHandler getAbandonHandler()
{
return abandonHandler;
}
public void setAbandonHandler( AbandonHandler abandonHandler )
{
this.handler.removeMessageHandler( AbandonRequest.class );
this.abandonHandler = abandonHandler;
this.abandonHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( AbandonRequest.class, this.abandonHandler );
}
public AddHandler getAddHandler()
{
return addHandler;
}
public void setAddHandler( AddHandler addHandler )
{
this.handler.removeMessageHandler( AddRequest.class );
this.addHandler = addHandler;
this.addHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( AddRequest.class, this.addHandler );
}
public BindHandler getBindHandler()
{
return bindHandler;
}
public void setBindHandler( BindHandler bindHandler )
{
this.handler.removeMessageHandler( BindRequest.class );
this.bindHandler = bindHandler;
this.bindHandler.setProtocolProvider( this );
this.bindHandler.setDirectoryService( directoryService );
//noinspection unchecked
this.handler.addMessageHandler( BindRequest.class, this.bindHandler );
}
public CompareHandler getCompareHandler()
{
return compareHandler;
}
public void setCompareHandler( CompareHandler compareHandler )
{
this.handler.removeMessageHandler( CompareRequest.class );
this.compareHandler = compareHandler;
this.compareHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( CompareRequest.class, this.compareHandler );
}
public DeleteHandler getDeleteHandler()
{
return deleteHandler;
}
public void setDeleteHandler( DeleteHandler deleteHandler )
{
this.handler.removeMessageHandler( DeleteRequest.class );
this.deleteHandler = deleteHandler;
this.deleteHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( DeleteRequest.class, this.deleteHandler );
}
public ExtendedHandler getExtendedHandler()
{
return extendedHandler;
}
public void setExtendedHandler( ExtendedHandler extendedHandler )
{
this.handler.removeMessageHandler( ExtendedRequest.class );
this.extendedHandler = extendedHandler;
this.extendedHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( ExtendedRequest.class, this.extendedHandler );
}
public ModifyHandler getModifyHandler()
{
return modifyHandler;
}
public void setModifyHandler( ModifyHandler modifyHandler )
{
this.handler.removeMessageHandler( ModifyRequest.class );
this.modifyHandler = modifyHandler;
this.modifyHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( ModifyRequest.class, this.modifyHandler );
}
public ModifyDnHandler getModifyDnHandler()
{
return modifyDnHandler;
}
public void setModifyDnHandler( ModifyDnHandler modifyDnHandler )
{
this.handler.removeMessageHandler( ModifyDnRequest.class );
this.modifyDnHandler = modifyDnHandler;
this.modifyDnHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( ModifyDnRequest.class, this.modifyDnHandler );
}
public SearchHandler getSearchHandler()
{
return searchHandler;
}
public void setSearchHandler( SearchHandler searchHandler )
{
this.handler.removeMessageHandler( SearchRequest.class );
this.searchHandler = searchHandler;
this.searchHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( SearchRequest.class, this.searchHandler );
}
public UnbindHandler getUnbindHandler()
{
return unbindHandler;
}
public void setUnbindHandler( UnbindHandler unbindHandler )
{
this.handler.removeMessageHandler( UnbindRequest.class );
this.unbindHandler = unbindHandler;
this.unbindHandler.setProtocolProvider( this );
//noinspection unchecked
this.handler.addMessageHandler( UnbindRequest.class, this.unbindHandler );
}
public SessionRegistry getRegistry()
{
return registry;
}
public boolean isStarted()
{
return started;
}
public void setStarted( boolean started )
{
this.started = started;
}
/**
* A snickers based BER Decoder factory.
*/
private static final class ProtocolCodecFactoryImpl implements ProtocolCodecFactory
{
final DirectoryService directoryService;
public ProtocolCodecFactoryImpl( DirectoryService directoryService )
{
this.directoryService = directoryService;
}
public ProtocolEncoder getEncoder()
{
return new Asn1CodecEncoder( new MessageEncoder() );
}
public ProtocolDecoder getDecoder()
{
return new Asn1CodecDecoder( new MessageDecoder( new BinaryAttributeDetector()
{
public boolean isBinary( String id )
{
AttributeTypeRegistry attrRegistry = directoryService.getRegistries().getAttributeTypeRegistry();
try
{
AttributeType type = attrRegistry.lookup( id );
return ! type.getSyntax().isHumanReadable();
}
catch ( NamingException e )
{
return false;
}
}
}) );
}
}
private class LdapProtocolHandler extends DemuxingIoHandler
{
public void sessionCreated( IoSession session ) throws Exception
{
session.setAttribute( LdapServer.class.toString(), this );
IoFilterChain filters = session.getFilterChain();
filters.addLast( "codec", new ProtocolCodecFilter( codecFactory ) );
}
public void sessionClosed( IoSession session )
{
registry.remove( session );
}
public void messageReceived( IoSession session, Object message ) throws Exception
{
// Translate SSLFilter messages into the LDAP extended request
// defined in RFC 2830, 'Lightweight Directory Access Protocol (v3):
// Extension for Transport Layer Security'.
//
// The RFC specifies that the payload should be empty, but we use
// it to signal TLS state changes. This hack should be
// acceptable from a security standpoint because the StartTLS
// handler should react only to the SESSION_UNSECURED message
// and degrade the authentication level to 'anonymous' as specified
// in the RFC, so this poses no threat.
if ( message == SSLFilter.SESSION_SECURED )
{
ExtendedRequest req = new ExtendedRequestImpl( 0 );
req.setOid( "1.3.6.1.4.1.1466.20037" );
req.setPayload( "SECURED".getBytes( "ISO-8859-1" ) );
message = req;
}
else if ( message == SSLFilter.SESSION_UNSECURED )
{
ExtendedRequest req = new ExtendedRequestImpl( 0 );
req.setOid( "1.3.6.1.4.1.1466.20037" );
req.setPayload( "UNSECURED".getBytes( "ISO-8859-1" ) );
message = req;
}
if ( ( ( Request ) message ).getControls().size() > 0 && message instanceof ResultResponseRequest )
{
ResultResponseRequest req = ( ResultResponseRequest ) message;
for ( Control control1 : req.getControls().values() )
{
MutableControl control = ( MutableControl ) control1;
if ( control.isCritical() && !supportedControls.contains( control.getID() ) )
{
ResultResponse resp = req.getResultResponse();
resp.getLdapResult().setErrorMessage( "Unsupported critical control: " + control.getID() );
resp.getLdapResult().setResultCode( ResultCodeEnum.UNAVAILABLE_CRITICAL_EXTENSION );
session.write( resp );
return;
}
}
}
super.messageReceived( session, message );
}
public void exceptionCaught( IoSession session, Throwable cause )
{
if ( cause.getCause() instanceof ResponseCarryingMessageException )
{
ResponseCarryingMessageException rcme = ( ResponseCarryingMessageException ) cause.getCause();
session.write( rcme.getResponse() );
return;
}
SessionLog.warn( session,
"Unexpected exception forcing session to close: sending disconnect notice to client.", cause );
session.write( NoticeOfDisconnect.PROTOCOLERROR );
registry.remove( session );
session.close();
}
}
}
| fixing bug introduced in protocol handler
git-svn-id: dd90f696ee312d86d1f195500465131112b150f5@584306 13f79535-47bb-0310-9956-ffa450edef68
| protocol-ldap/src/main/java/org/apache/directory/server/ldap/LdapServer.java | fixing bug introduced in protocol handler |
|
Java | apache-2.0 | 108d5a4abf1c56d818f28e1b9aafd169a439d980 | 0 | BasinMC/minecraft-maven-plugin | /*
* Copyright 2016 Johannes Donath <[email protected]>
* and other copyright owners as documented in the project's IP log.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.basinmc.maven.plugins.minecraft.source;
import org.apache.maven.artifact.installer.ArtifactInstallationException;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.model.License;
import org.apache.maven.model.Model;
import org.apache.maven.model.Organization;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.basinmc.maven.plugins.minecraft.AbstractArtifactMojo;
import org.basinmc.maven.plugins.minecraft.launcher.DownloadDescriptor;
import org.basinmc.maven.plugins.minecraft.launcher.VersionIndex;
import org.basinmc.maven.plugins.minecraft.launcher.VersionMetadata;
import java.io.IOException;
import java.util.NoSuchElementException;
/**
* Fetches a Minecraft module from the remote servers unless a local version is already present
* within the local maven repository.
*
* @author <a href="mailto:[email protected]">Johannes Donath</a>
*/
@Mojo(
name = "fetch",
requiresProject = false,
threadSafe = true,
defaultPhase = LifecyclePhase.INITIALIZE
)
public class FetchModuleMojo extends AbstractArtifactMojo {
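// This mojo expects the "module" (client or server) and "gameVersion" parameters to be configured;
// execute() validates both via verifyProperties() before looking up or downloading the artifact.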
/**
* {@inheritDoc}
*/
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
this.verifyProperties("module", "gameVersion");
this.getLog().info("Fetching Minecraft module (" + this.getModule() + " artifact of version " + this.getGameVersion() + ")");
try {
if (!this.findArtifact(this.createArtifact(MINECRAFT_GROUP_ID, this.getModule(), this.getGameVersion(), VANILLA_CLASSIFIER)).isPresent()) {
this.fetchArtifact();
} else {
this.getLog().info("Skipping download of Minecraft module - Located cached artifact");
}
} catch (ArtifactResolutionException ex) {
throw new MojoFailureException("Failed to resolve Minecraft module artifact: " + ex.getMessage(), ex);
} catch (ArtifactInstallationException ex) {
throw new MojoFailureException("Failed to install Minecraft module artifact: " + ex.getMessage(), ex);
} catch (IOException ex) {
throw new MojoFailureException("Failed to read/write temporary file or access remote server: " + ex.getMessage(), ex);
} catch (Exception ex) {
throw new MojoFailureException("Failed to execute task: " + ex.getMessage(), ex);
}
}
/**
* Fetches and installs the Minecraft module artifact.
*/
private void fetchArtifact() throws Exception {
VersionIndex index = VersionIndex.fetch();
VersionMetadata metadata = index.getMetadata(this.getGameVersion()).orElseThrow(() -> new NoSuchElementException("No such game version: " + this.getGameVersion()));
DownloadDescriptor descriptor = ("server".equals(this.getModule()) ? metadata.getServerDownload() : metadata.getClientDownload());
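// Download the module jar into a temporary file (a), generate a temporary POM file (m),
// and then install the artifact from those files into the local repository.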
this.temporary((a) -> {
descriptor.fetch(a);
this.temporary((m) -> {
this.getLog().info("Storing Minecraft module as artifact with coordinates " + MINECRAFT_GROUP_ID + ":" + this.getModule() + ":" + this.getGameVersion() + ":jar:" + VANILLA_CLASSIFIER);
{
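// Assemble the POM metadata for the Mojang-provided module: coordinates, jar packaging,
// Mojang as the organization and the Mojang EULA as its (manually distributed) license.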
Model model = new Model();
model.setGroupId(MINECRAFT_GROUP_ID);
model.setArtifactId(this.getModule());
model.setVersion(this.getGameVersion());
model.setPackaging("jar");
Organization organization = new Organization();
organization.setName("Mojang");
organization.setUrl("http://mojang.com");
model.setOrganization(organization);
License license = new License();
license.setName("Mojang EULA");
license.setUrl("https://account.mojang.com/terms");
license.setDistribution("manual");
model.addLicense(license);
}
this.installArtifact(this.createArtifact(MINECRAFT_GROUP_ID, this.getModule(), this.getGameVersion(), VANILLA_CLASSIFIER), a, m);
});
});
}
}
| src/main/java/org/basinmc/maven/plugins/minecraft/source/FetchModuleMojo.java | /*
* Copyright 2016 Johannes Donath <[email protected]>
* and other copyright owners as documented in the project's IP log.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.basinmc.maven.plugins.minecraft.source;
import org.apache.maven.artifact.installer.ArtifactInstallationException;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.model.License;
import org.apache.maven.model.Model;
import org.apache.maven.model.Organization;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.basinmc.maven.plugins.minecraft.AbstractArtifactMojo;
import org.basinmc.maven.plugins.minecraft.launcher.DownloadDescriptor;
import org.basinmc.maven.plugins.minecraft.launcher.VersionIndex;
import org.basinmc.maven.plugins.minecraft.launcher.VersionMetadata;
import java.io.IOException;
import java.util.NoSuchElementException;
/**
* Fetches a Minecraft module from the remote servers unless a local version is already present
* within the local maven repository.
*
* @author <a href="mailto:[email protected]">Johannes Donath</a>
*/
@Mojo(
name = "fetch",
requiresProject = false,
threadSafe = true,
defaultPhase = LifecyclePhase.INITIALIZE
)
public class FetchModuleMojo extends AbstractArtifactMojo {
/**
* {@inheritDoc}
*/
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
this.verifyProperties("module");
this.getLog().info("Fetching Minecraft module (" + this.getModule() + " artifact of version " + this.getGameVersion() + ")");
try {
if (!this.findArtifact(this.createArtifact(MINECRAFT_GROUP_ID, this.getModule(), this.getGameVersion(), VANILLA_CLASSIFIER)).isPresent()) {
this.fetchArtifact();
} else {
this.getLog().info("Skipping download of Minecraft module - Located cached artifact");
}
} catch (ArtifactResolutionException ex) {
throw new MojoFailureException("Failed to resolve Minecraft module artifact: " + ex.getMessage(), ex);
} catch (ArtifactInstallationException ex) {
throw new MojoFailureException("Failed to install Minecraft module artifact: " + ex.getMessage(), ex);
} catch (IOException ex) {
throw new MojoFailureException("Failed to read/write temporary file or access remote server: " + ex.getMessage(), ex);
} catch (Exception ex) {
throw new MojoFailureException("Failed to execute task: " + ex.getMessage(), ex);
}
}
/**
* Fetches and installs the Minecraft module artifact.
*/
private void fetchArtifact() throws Exception {
VersionIndex index = VersionIndex.fetch();
VersionMetadata metadata = index.getMetadata(this.getGameVersion()).orElseThrow(() -> new NoSuchElementException("No such game version: " + this.getGameVersion()));
DownloadDescriptor descriptor = ("server".equals(this.getModule()) ? metadata.getServerDownload() : metadata.getClientDownload());
this.temporary((a) -> {
descriptor.fetch(a);
this.temporary((m) -> {
this.getLog().info("Storing Minecraft module as artifact with coordinates " + MINECRAFT_GROUP_ID + ":" + this.getModule() + ":" + this.getGameVersion() + ":jar:" + VANILLA_CLASSIFIER);
{
Model model = new Model();
model.setGroupId(MINECRAFT_GROUP_ID);
model.setArtifactId(this.getModule());
model.setVersion(this.getGameVersion());
model.setPackaging("jar");
Organization organization = new Organization();
organization.setName("Mojang");
organization.setUrl("http://mojang.com");
model.setOrganization(organization);
License license = new License();
license.setName("Mojang EULA");
license.setUrl("https://account.mojang.com/terms");
license.setDistribution("manual");
model.addLicense(license);
}
this.installArtifact(this.createArtifact(MINECRAFT_GROUP_ID, this.getModule(), this.getGameVersion(), VANILLA_CLASSIFIER), a, m);
});
});
}
}
| Added gameVersion property to validation to guarantee consistency in case of a change within our validation logic.
| src/main/java/org/basinmc/maven/plugins/minecraft/source/FetchModuleMojo.java | Added gameVersion property to validation to guarantee consistency in case of a change within our validation logic. |
|
Java | apache-2.0 | c22dbadf9996644012411bfbb2b2c3937969b313 | 0 | mmacfadden/orientdb,mmacfadden/orientdb,joansmith/orientdb,orientechnologies/orientdb-etl,joansmith/orientdb,cory-p-oncota/orientdb-etl,mmacfadden/orientdb,joansmith/orientdb,orientechnologies/orientdb,wouterv/orientdb,wouterv/orientdb,orientechnologies/orientdb-etl,wouterv/orientdb,cory-p-oncota/orientdb-etl,bmcgavin/orientdb-etl,orientechnologies/orientdb,mmacfadden/orientdb,orientechnologies/orientdb,wouterv/orientdb,orientechnologies/orientdb,bmcgavin/orientdb-etl,joansmith/orientdb | /*
*
* * Copyright 2010-2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package com.orientechnologies.orient.etl.transformer;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.etl.ETLBaseTest;
import org.junit.Test;
import java.util.Date;
import java.util.List;
/**
* Tests ETL CSV Transformer.
*
* @author Luca Garulli
*/
public class OCSVTransformerTest extends ETLBaseTest {
@Test
public void testEmpty() {
String cfgJson = "{source: { content: { value: '' } }, extractor : { json: {} }, loader: { test: {} } }";
process(cfgJson);
assertEquals(0, getResult().size());
}
@Test
public void testOneObject() {
process("{source: { content: { value: 'name,surname\nJay,Miner' } }, extractor : { row: {} }, transformers: [{ csv: {} }], loader: { test: {} } }");
assertEquals(1, getResult().size());
ODocument doc = getResult().get(0);
assertEquals(2, doc.fields());
assertEquals("Jay", doc.field("name"));
assertEquals("Miner", doc.field("surname"));
}
@Test
public void testSmallSet() {
String content = "name,surname,id";
for (int i = 0; i < names.length; ++i)
content += "\n" + names[i] + "," + surnames[i] + "," + i;
process("{source: { content: { value: '" + content
+ "' } }, extractor : { row: {} }, transformers: [{ csv: {} }], loader: { test: {} } }");
assertEquals(getResult().size(), names.length);
int i = 0;
for (ODocument doc : getResult()) {
assertEquals(3, doc.fields());
assertEquals(names[i], doc.field("name"));
assertEquals(surnames[i], doc.field("surname"));
assertEquals(i, doc.field("id"));
i++;
}
}
@Test
public void testDateTypeAutodetection() {
String cfgJson = "{source: { content: { value: 'BirthDay\n2008-04-30' } }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
Date birthday = doc.field("BirthDay");
assertEquals(2008, birthday.getYear() + 1900);
assertEquals(4, birthday.getMonth() + 1);
assertEquals(30, birthday.getDate());
}
@Test
public void testStringInDblQuotes() throws Exception {
String cfgJson = "{source: { content: { value: 'text\n\"Hello, quotes are here!\"' } }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
String text = doc.field("text");
assertEquals("Hello, quotes are here!", text);
}
@Test
public void testStringStartedFromDigit() throws Exception {
String cfgJson = "{source: { content: { value: 'address\n\"401 Congress Ave, Suite 2450\"' } }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
String address = doc.field("address");
assertEquals("401 Congress Ave, Suite 2450", address);
}
@Test
public void testFloat() {
String cfgJson = "{source: { content: { value: 'firstNumber\n10.78'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(10.78f, doc.field("firstNumber"));
}
@Test
public void testFloatWithinQuotes() {
String cfgJson = "{source: { content: { value: 'firstNumber\n\"10.78\"'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(10.78f, (Float) doc.field("firstNumber"));
}
@Test
public void testFloatWithinQuotesAndCommaAsDecimalSeparator() {
String cfgJson = "{source: { content: { value: 'firstNumber\n\"10,78\"'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(10.78f, (Float) doc.field("firstNumber"));
}
@Test
public void testDouble() {
Double minDouble = 540282346638528870000000000000000000000.0d;
String cfgJson = "{source: { content: { value: 'secondNumber\n540282346638528870000000000000000000000.0'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(minDouble, (Double) doc.field("secondNumber"));
}
@Test
public void testDoubleWithinQuotes() {
Double minDouble = 540282346638528870000000000000000000000.0d;
String cfgJson = "{source: { content: { value: 'secondNumber\n\"540282346638528870000000000000000000000.0\"'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(minDouble, (Double) doc.field("secondNumber"));
}
@Test
public void testDoubleWithinQuotesAndCommaAsDecimalSeparator() {
Double minDouble = 540282346638528870000000000000000000000.0d;
String cfgJson = "{source: { content: { value: 'secondNumber\n\"540282346638528870000000000000000000000,0\"'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(minDouble, (Double) doc.field("secondNumber"));
}
@Test
public void testInteger() {
String cfgJson = "{source: { content: { value: 'number\n100'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(100), (Integer) doc.field("number"));
}
@Test
public void testIntegerWithinQuotes() {
String cfgJson = "{source: { content: { value: 'number\n\"100\"'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(100), (Integer) doc.field("number"));
}
@Test
public void testLong() {
String cfgJson = "{source: { content: { value: 'number\n3000000000'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Long(3000000000L), (Long) doc.field("number"));
}
@Test
public void testLongWithinQuotes() {
String cfgJson = "{source: { content: { value: 'number\n\"3000000000\"'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Long(3000000000L), (Long) doc.field("number"));
}
@Test
public void testGetCellContentSingleQuoted() {
String singleQuotedString = "\"aaa\"";
String unQuotedString = "aaa";
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertEquals(unQuotedString, ocsvTransformer.getCellContent(singleQuotedString));
}
@Test
public void testGetCellContentDoubleQuoted() {
String doubleQuotedString = "\"\"aaa\"\"";
String unQuotedString = "\"aaa\"";
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertEquals(unQuotedString, ocsvTransformer.getCellContent(doubleQuotedString));
}
@Test
public void testGetCellContentNullValue() {
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertEquals(null, ocsvTransformer.getCellContent(null));
}
@Test
public void testGetCellContentWithoutQuoteString() {
String unQuotedString = "aaa";
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertEquals(unQuotedString, ocsvTransformer.getCellContent(unQuotedString));
}
@Test
public void testIsFiniteFloat() {
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertFalse(ocsvTransformer.isFinite(Float.NaN));
assertFalse(ocsvTransformer.isFinite(Float.POSITIVE_INFINITY));
assertFalse(ocsvTransformer.isFinite(Float.NEGATIVE_INFINITY));
assertTrue(ocsvTransformer.isFinite(0f));
}
@Test
public void testNullCell() {
String cfgJson = "{source: { content: { value: 'id,postId,text\n1,,Hello'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertNull(doc.field("postId"));
assertEquals("Hello", (String) doc.field("text"));
}
@Test
public void testNullValueInCell() {
String cfgJson = "{source: { content: { value: 'id,postId,text\n1,NULL,Hello'} }, extractor : { row : {} }, transformers : [{ csv : {nullValue: 'NULL'} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertNull(doc.field("postId"));
assertEquals("Hello", (String) doc.field("text"));
}
@Test
public void testNullValueInCellEmptyString() {
String cfgJson = "{source: { content: { value: 'id,title,text\n1,\"\",Hello'} }, extractor : { row : {} }, transformers : [{ csv : {nullValue: 'NULL'} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertEquals("", (String) doc.field("title"));
assertEquals("Hello", (String) doc.field("text"));
}
@Test
public void testQuotedEmptyString() {
String cfgJson = "{source: { content: { value: 'id,title,text\n1,\"\",Hello'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertEquals("", (String) doc.field("title"));
assertEquals("Hello", (String) doc.field("text"));
}
@Test
public void testCRLFDelimiter() {
String cfgJson = "{source: { content: { value: 'id,text,num\r\n1,my test text,1'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertEquals("my test text", (String) doc.field("text"));
assertEquals(new Integer(1), (Integer) doc.field("num"));
}
@Test
public void testEndingLineBreak() {
String cfgJson = "{source: { content: { value: 'id,text,num\r\n1,my test text,1\r\n'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertEquals("my test text", (String) doc.field("text"));
assertEquals(new Integer(1), (Integer) doc.field("num"));
}
@Test
public void testEndingSpaceInFieldName() {
String cfgJson = "{source: { content: { value: 'id ,text ,num \r\n1,my test text,1\r\n'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id "));
assertNotSame("my test text", (String) doc.field("text"));
assertEquals(new Integer(1), (Integer) doc.field("num "));
}
// @Test
  // Temporarily disabled on April 13th 2015 by Luca
// public void testCRLFIWithinQuotes() {
// String cfgJson = "{source: { content: { value: 'id ,text ,num \r\n1,\"my test\r\n text\",1\r\n'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
// process(cfgJson);
// List<ODocument> res = getResult();
// ODocument doc = res.get(0);
// assertEquals(new Integer(1), (Integer) doc.field("id "));
// assertEquals("my test\r\n text", (String) doc.field("text "));
// assertEquals(new Integer(1), (Integer) doc.field("num "));
// }
@Test
public void testEscapingDoubleQuotes() {
String cfgJson = "{source: { content: { value: 'id ,text ,num \r\n1,\"my test \"\" text\",1\r\n'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id "));
assertEquals("my test \"\" text", (String) doc.field("text "));
assertEquals(new Integer(1), (Integer) doc.field("num "));
}
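  // Note: the two methods below are not annotated with @Test; whether they are picked up
  // depends on how ETLBaseTest discovers tests (e.g. JUnit 3 style "test*" naming).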
public void testNegativeInteger() {
String cfgJson = "{source: { content: { value: 'id\r\n-1'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(-1), (Integer) doc.field("id"));
}
public void testNegativeFloat() {
String cfgJson = "{source: { content: { value: 'id\r\n-1.0'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Float(-1.0f), (Float) doc.field("id"));
}
}
| src/test/java/com/orientechnologies/orient/etl/transformer/OCSVTransformerTest.java | /*
*
* * Copyright 2010-2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package com.orientechnologies.orient.etl.transformer;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.etl.ETLBaseTest;
import org.junit.Test;
import java.util.Date;
import java.util.List;
/**
* Tests ETL CSV Transformer.
*
* @author Luca Garulli
*/
public class OCSVTransformerTest extends ETLBaseTest {
@Test
public void testEmpty() {
String cfgJson = "{source: { content: { value: '' } }, extractor : { json: {} }, loader: { test: {} } }";
process(cfgJson);
assertEquals(0, getResult().size());
}
@Test
public void testOneObject() {
process("{source: { content: { value: 'name,surname\nJay,Miner' } }, extractor : { row: {} }, transformers: [{ csv: {} }], loader: { test: {} } }");
assertEquals(1, getResult().size());
ODocument doc = getResult().get(0);
assertEquals(2, doc.fields());
assertEquals("Jay", doc.field("name"));
assertEquals("Miner", doc.field("surname"));
}
@Test
public void testSmallSet() {
String content = "name,surname,id";
for (int i = 0; i < names.length; ++i)
content += "\n" + names[i] + "," + surnames[i] + "," + i;
process("{source: { content: { value: '" + content + "' } }, extractor : { row: {} }, transformers: [{ csv: {} }], loader: { test: {} } }");
assertEquals(getResult().size(), names.length);
int i = 0;
for (ODocument doc : getResult()) {
assertEquals(3, doc.fields());
assertEquals(names[i], doc.field("name"));
assertEquals(surnames[i], doc.field("surname"));
assertEquals(i, doc.field("id"));
i++;
}
}
@Test
public void testDateTypeAutodetection(){
String cfgJson = "{source: { content: { value: 'BirthDay\n2008-04-30' } }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
Date birthday = doc.field("BirthDay");
assertEquals(2008, birthday.getYear()+1900);
assertEquals(4, birthday.getMonth()+1);
assertEquals(30, birthday.getDate());
}
@Test
public void testStringInDblQuotes() throws Exception {
String cfgJson = "{source: { content: { value: 'text\n\"Hello, quotes are here!\"' } }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
String text = doc.field("text");
assertEquals("Hello, quotes are here!", text);
}
@Test
public void testStringStartedFromDigit() throws Exception {
String cfgJson = "{source: { content: { value: 'address\n\"401 Congress Ave, Suite 2450\"' } }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
String address = doc.field("address");
assertEquals("401 Congress Ave, Suite 2450", address);
}
@Test
public void testFloat() {
String cfgJson = "{source: { content: { value: 'firstNumber\n10.78'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(10.78f, doc.field("firstNumber"));
}
@Test
public void testFloatWithinQuotes() {
String cfgJson = "{source: { content: { value: 'firstNumber\n\"10.78\"'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(10.78f, (Float)doc.field("firstNumber"));
}
@Test
public void testFloatWithinQuotesAndCommaAsDecimalSeparator() {
String cfgJson = "{source: { content: { value: 'firstNumber\n\"10,78\"'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(10.78f, (Float)doc.field("firstNumber"));
}
@Test
public void testDouble() {
Double minDouble =540282346638528870000000000000000000000.0d;
String cfgJson = "{source: { content: { value: 'secondNumber\n540282346638528870000000000000000000000.0'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(minDouble, (Double)doc.field("secondNumber"));
}
@Test
public void testDoubleWithingQuotes() {
Double minDouble = 540282346638528870000000000000000000000.0d;
String cfgJson = "{source: { content: { value: 'secondNumber\n\"540282346638528870000000000000000000000.0\"'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(minDouble, (Double) doc.field("secondNumber"));
}
@Test
public void testDoubleWithingQuotesAndCommaAsDecimalSeparator() {
Double minDouble = 540282346638528870000000000000000000000.0d;
String cfgJson = "{source: { content: { value: 'secondNumber\n\"540282346638528870000000000000000000000,0\"'} }, extractor : { row: {} }, transformers : [{ csv: {} }], loader: { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(minDouble, (Double)doc.field("secondNumber"));
}
@Test
public void testInteger() {
String cfgJson = "{source: { content: { value: 'number\n100'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(100), (Integer)doc.field("number"));
}
@Test
public void testIntegerWithingQuotes() {
String cfgJson = "{source: { content: { value: 'number\n\"100\"'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(100), (Integer)doc.field("number"));
}
@Test
public void testLong() {
String cfgJson = "{source: { content: { value: 'number\n3000000000'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Long(3000000000L), (Long)doc.field("number"));
}
@Test
public void testLongWithingQuotes() {
String cfgJson = "{source: { content: { value: 'number\n\"3000000000\"'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Long(3000000000L), (Long)doc.field("number"));
}
@Test
public void testGetCellContentSingleQuoted() {
String singleQuotedString = "\"aaa\"";
String unQuotedString = "aaa";
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertEquals(unQuotedString, ocsvTransformer.getCellContent(singleQuotedString));
}
@Test
public void testGetCellContentDoubleQuoted() {
String doubleQuotedString = "\"\"aaa\"\"";
String unQuotedString = "\"aaa\"";
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertEquals(unQuotedString, ocsvTransformer.getCellContent(doubleQuotedString));
}
@Test
public void testGetCellContentNullValue() {
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertEquals(null, ocsvTransformer.getCellContent(null));
}
@Test
public void testGetCellContentWithoutQuoteString() {
String unQuotedString = "aaa";
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertEquals(unQuotedString, ocsvTransformer.getCellContent(unQuotedString));
}
@Test
public void testIsFiniteFloat() {
OCSVTransformer ocsvTransformer = new OCSVTransformer();
assertFalse(ocsvTransformer.isFinite(Float.NaN));
assertFalse(ocsvTransformer.isFinite(Float.POSITIVE_INFINITY));
assertFalse(ocsvTransformer.isFinite(Float.NEGATIVE_INFINITY));
assertTrue(ocsvTransformer.isFinite(0f));
}
@Test
public void testNullCell() {
String cfgJson = "{source: { content: { value: 'id,postId,text\n1,,Hello'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertNull(doc.field("postId"));
assertEquals("Hello", (String) doc.field("text"));
}
@Test
public void testNullValueInCell() {
String cfgJson = "{source: { content: { value: 'id,postId,text\n1,NULL,Hello'} }, extractor : { row : {} }, transformers : [{ csv : {nullValue: 'NULL'} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertNull(doc.field("postId"));
assertEquals("Hello", (String) doc.field("text"));
}
@Test
public void testNullValueInCellEmptyString() {
String cfgJson = "{source: { content: { value: 'id,title,text\n1,\"\",Hello'} }, extractor : { row : {} }, transformers : [{ csv : {nullValue: 'NULL'} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertEquals("", (String) doc.field("title"));
assertEquals("Hello", (String) doc.field("text"));
}
@Test
public void testQuotedEmptyString() {
String cfgJson = "{source: { content: { value: 'id,title,text\n1,\"\",Hello'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertEquals("", (String) doc.field("title"));
assertEquals("Hello", (String) doc.field("text"));
}
@Test
public void testCRLFDelimiter() {
String cfgJson = "{source: { content: { value: 'id,text,num\r\n1,my test text,1'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertEquals("my test text", (String) doc.field("text"));
assertEquals(new Integer(1), (Integer) doc.field("num"));
}
@Test
public void testEndingLineBreak() {
String cfgJson = "{source: { content: { value: 'id,text,num\r\n1,my test text,1\r\n'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id"));
assertEquals("my test text", (String) doc.field("text"));
assertEquals(new Integer(1), (Integer) doc.field("num"));
}
@Test
public void testEndingSpaceInFieldName() {
String cfgJson = "{source: { content: { value: 'id ,text ,num \r\n1,my test text,1\r\n'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id "));
assertNotSame("my test text", (String) doc.field("text"));
assertEquals(new Integer(1), (Integer) doc.field("num "));
}
@Test
public void testCRLFIWithinQuotes() {
String cfgJson = "{source: { content: { value: 'id ,text ,num \r\n1,\"my test\r\n text\",1\r\n'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id "));
assertEquals("my test\r\n text", (String) doc.field("text "));
assertEquals(new Integer(1), (Integer) doc.field("num "));
}
@Test
public void testEscapingDoubleQuotes() {
String cfgJson = "{source: { content: { value: 'id ,text ,num \r\n1,\"my test \"\" text\",1\r\n'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(1), (Integer) doc.field("id "));
assertEquals("my test \"\" text", (String) doc.field("text "));
assertEquals(new Integer(1), (Integer) doc.field("num "));
}
public void testNegativeInteger() {
String cfgJson = "{source: { content: { value: 'id\r\n-1'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Integer(-1), (Integer) doc.field("id"));
}
public void testNegativeFloat() {
String cfgJson = "{source: { content: { value: 'id\r\n-1.0'} }, extractor : { row : {} }, transformers : [{ csv : {} }], loader : { test: {} } }";
process(cfgJson);
List<ODocument> res = getResult();
ODocument doc = res.get(0);
assertEquals(new Float(-1.0f), (Float) doc.field("id"));
}
}
| Temp. disabled test because won't pass
| src/test/java/com/orientechnologies/orient/etl/transformer/OCSVTransformerTest.java | Temp. disabled test because won't pass |
|
Java | apache-2.0 | 6daf574d7765aa7257e01fe0551fb17e9ae40861 | 0 | jagguli/intellij-community,caot/intellij-community,alphafoobar/intellij-community,caot/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,slisson/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,clumsy/intellij-community,clumsy/intellij-community,allotria/intellij-community,joewalnes/idea-community,asedunov/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,samthor/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,robovm/robovm-studio,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,clumsy/intellij-community,fitermay/intellij-community,consulo/consulo,pwoodworth/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,consulo/consulo,mglukhikh/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,petteyg/intellij-community,semonte/intellij-community,holmes/intellij-community,ibinti/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,samthor/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,holmes/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,signed/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,jagguli/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,semonte/intellij-community,clumsy/intellij-community,semonte/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,da1z/intellij-community,ernestp/consulo,allotria/intellij-community,ryano144/intellij-community
,Distrotech/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,caot/intellij-community,semonte/intellij-community,clumsy/intellij-community,allotria/intellij-community,ahb0327/intellij-community,da1z/intellij-community,nicolargo/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,caot/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,ibinti/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,caot/intellij-community,supersven/intellij-community,ryano144/intellij-community,blademainer/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,akosyakov/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,samthor/intellij-community,clumsy/intellij-community,samthor/intellij-community,da1z/intellij-community,joewalnes/idea-community,salguarnieri/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,hurricup/intellij-community,joewalnes/idea-community,fitermay/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,asedunov/intellij-community,signed/intellij-community,Lekanich/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,apixandru/intellij-community,ibinti/intellij-community,retomerz/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,signed/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,allotria/intellij-community,slisson/intellij-community,joewalnes/idea-community,kdwink/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,xfournet/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,ernestp/consulo,MER-GROUP/intellij-community,kool79/intellij-community,kdwink/intellij-community,asedunov/intellij-community,robovm/robovm-studio,da1z/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,fnouama/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,adedayo/intelli
j-community,robovm/robovm-studio,petteyg/intellij-community,asedunov/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,retomerz/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,supersven/intellij-community,kool79/intellij-community,izonder/intellij-community,da1z/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,FHannes/intellij-community,jagguli/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,ibinti/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,gnuhub/intellij-community,izonder/intellij-community,FHannes/intellij-community,jagguli/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,xfournet/intellij-community,caot/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,amith01994/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,retomerz/intellij-community,ernestp/consulo,blademainer/intellij-community,apixandru/intellij-community,allotria/intellij-community,ibinti/intellij-community,hurricup/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,consulo/consulo,orekyuu/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,signed/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,samthor/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,ernestp/consulo,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,signed/intellij-community,joewalnes/idea-community,akosyakov/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,holmes/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,blademainer/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,consulo/consulo,da1z/intellij-community,apixandru/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,retomerz/i
ntellij-community,retomerz/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,fitermay/intellij-community,retomerz/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,consulo/consulo,blademainer/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,signed/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,allotria/intellij-community,apixandru/intellij-community,joewalnes/idea-community,apixandru/intellij-community,supersven/intellij-community,dslomov/intellij-community,adedayo/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,hurricup/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,retomerz/intellij-community,clumsy/intellij-community,kool79/intellij-community,robovm/robovm-studio,FHannes/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,izonder/intellij-community,kool79/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,vladmm/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,FHannes/intellij-community,supersven/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,holmes/intellij-community,da1z/intellij-community,samthor/intellij-community,Lekanich/intellij-community,signed/intellij-community,ibinti/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,izonder/intellij-community,holmes/intellij-community,caot/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,holmes/intellij-community,Lekanich/intellij-co
mmunity,robovm/robovm-studio,robovm/robovm-studio,xfournet/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,joewalnes/idea-community,ryano144/intellij-community,semonte/intellij-community,vladmm/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,slisson/intellij-community,kool79/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,slisson/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,supersven/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,amith01994/intellij-community,izonder/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,da1z/intellij-community,semonte/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,xfournet/intellij-community,ibinti/intellij-community,retomerz/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,adedayo/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,da1z/intellij-community,allotria/intellij-community,petteyg/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,fitermay/intellij-community,allotria/intellij-community,FHannes/intellij-community,apixandru/intellij-community,kdwink/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,consulo/consulo,robovm/robovm-studio,supersven/intellij-community,samthor/intellij-community,gnuhub/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,hurricup/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,signed/intellij-community,Distrotech/intellij-community,supersven/intellij-community,diorcety/intellij-community,ernestp/consulo,SerCeMan/intellij-community,retomerz/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,slisson/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,pwoodworth/intellij-communi
ty,blademainer/intellij-community,lucafavatella/intellij-community,caot/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,semonte/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,ernestp/consulo,fengbaicanhe/intellij-community,supersven/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,supersven/intellij-community,fnouama/intellij-community,samthor/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,kool79/intellij-community,semonte/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,jagguli/intellij-community,ryano144/intellij-community,signed/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,signed/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,da1z/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,kdwink/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,holmes/intellij-community,fnouama/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,asedunov/intellij-community,ryano144/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,izonder/intellij-community,
fnouama/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.impl.GenericNotifierImpl;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ThreeState;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.dialogs.SvnInteractiveAuthenticationProvider;
import org.tmatesoft.svn.core.SVNAuthenticationException;
import org.tmatesoft.svn.core.SVNCancelException;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.auth.SVNAuthentication;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNWCClient;
public class SvnAuthenticationNotifier extends GenericNotifierImpl<SvnAuthenticationNotifier.AuthenticationRequest, SVNURL> {
private static final Logger LOG = Logger.getInstance("#org.jetbrains.idea.svn.SvnAuthenticationNotifier");
private static final String ourGroupId = "SubversionId";
private final SvnVcs myVcs;
private final RootsToWorkingCopies myRootsToWorkingCopies;
public SvnAuthenticationNotifier(final SvnVcs svnVcs) {
super(svnVcs.getProject(), ourGroupId, "Not Logged To Subversion", NotificationType.ERROR);
myVcs = svnVcs;
myRootsToWorkingCopies = myVcs.getRootsToWorkingCopies();
}
@Override
protected boolean ask(final AuthenticationRequest obj) {
final Ref<Boolean> resultRef = new Ref<Boolean>();
final boolean done = ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
public void run() {
final boolean result = interactiveValidation(obj.myProject, obj.getUrl(), obj.getRealm(), obj.getKind());
log("ask result for: " + obj.getUrl() + " is: " + result);
resultRef.set(result);
if (result) {
onStateChangedToSuccess(obj);
}
}
}, "Checking authorization state", true, myVcs.getProject());
return done && Boolean.TRUE.equals(resultRef.get());
}
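  // Invoked when interactive authentication for the request succeeds; triggers a refresh
  // of the SVN roots.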
private void onStateChangedToSuccess(final AuthenticationRequest obj) {
myVcs.invokeRefreshSvnRoots(false);
/*ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
myVcs.invokeRefreshSvnRoots(false);
}
});*/
/*final List<SVNURL> outdatedRequests = new LinkedList<SVNURL>();
final Collection<SVNURL> keys = getAllCurrentKeys();
for (SVNURL key : keys) {
final SVNURL commonURLAncestor = SVNURLUtil.getCommonURLAncestor(key, obj.getUrl());
if ((! StringUtil.isEmptyOrSpaces(commonURLAncestor.getHost())) && (! StringUtil.isEmptyOrSpaces(commonURLAncestor.getPath()))) {
final AuthenticationRequest currObj = getObj(key);
if ((currObj != null) && passiveValidation(myVcs.getProject(), key, true, currObj.getRealm(), currObj.getKind())) {
outdatedRequests.add(key);
}
}
}
log("on state changed ");
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
for (SVNURL key : outdatedRequests) {
removeLazyNotificationByKey(key);
}
}
}, ModalityState.NON_MODAL); */
}
@Override
public void ensureNotify(AuthenticationRequest obj) {
/*ChangesViewBalloonProblemNotifier.showMe(myVcs.getProject(), "You are not authenticated to '" + obj.getRealm() + "'." +
"To login, see pending notifications.", MessageType.ERROR);*/
super.ensureNotify(obj);
}
@NotNull
@Override
public SVNURL getKey(final AuthenticationRequest obj) {
// !!! wc's URL
return obj.getWcUrl();
}
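  // Resolves the working-copy root URL for the request, caching on the request object
  // whether its URL lies outside all known working copies.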
@Nullable
public SVNURL getWcUrl(final AuthenticationRequest obj) {
if (obj.isOutsideCopies()) return null;
if (obj.getWcUrl() != null) return obj.getWcUrl();
final WorkingCopy copy = myRootsToWorkingCopies.getMatchingCopy(obj.getUrl());
if (copy != null) {
obj.setOutsideCopies(false);
obj.setWcUrl(copy.getUrl());
} else {
obj.setOutsideCopies(true);
}
return copy == null ? null : copy.getUrl();
}
/**
   * Based on the presence of notifications.
*/
public ThreeState isAuthenticatedFor(final VirtualFile vf) {
final WorkingCopy wcCopy = myRootsToWorkingCopies.getWcRoot(vf);
if (wcCopy == null) return ThreeState.UNSURE;
// check there's no cancellation yet
final boolean haveCancellation = getStateFor(wcCopy.getUrl());
if (haveCancellation) return ThreeState.NO;
// check have credentials
return passiveValidation(myVcs.getProject(), wcCopy.getUrl()) ? ThreeState.YES : ThreeState.NO;
}
@NotNull
@Override
protected String getNotificationContent(AuthenticationRequest obj) {
return "<a href=\"\">Click to fix.</a> Not logged to Subversion '" + obj.getRealm() + "' (" + obj.getUrl().toDecodedString() + ")";
}
@NotNull
@Override
protected String getToString(AuthenticationRequest obj) {
return "Click to fix. Not logged to Subversion '" + obj.getRealm() + "' (" + obj.getUrl().toDecodedString() + ")";
}
public static class AuthenticationRequest {
private final Project myProject;
private final String myKind;
private final SVNURL myUrl;
private final String myRealm;
private SVNURL myWcUrl;
private boolean myOutsideCopies;
public AuthenticationRequest(Project project, String kind, SVNURL url, String realm) {
myProject = project;
myKind = kind;
myUrl = url;
myRealm = realm;
}
public boolean isOutsideCopies() {
return myOutsideCopies;
}
public void setOutsideCopies(boolean outsideCopies) {
myOutsideCopies = outsideCopies;
}
public SVNURL getWcUrl() {
return myWcUrl;
}
public void setWcUrl(SVNURL wcUrl) {
myWcUrl = wcUrl;
}
public String getKind() {
return myKind;
}
public SVNURL getUrl() {
return myUrl;
}
public String getRealm() {
return myRealm;
}
}
static void log(final Throwable t) {
LOG.debug(t);
}
static void log(final String s) {
LOG.debug(s);
}
public static boolean passiveValidation(final Project project, final SVNURL url) {
final SvnConfiguration configuration = SvnConfiguration.getInstance(project);
final ISVNAuthenticationManager passiveManager = configuration.getPassiveAuthenticationManager();
return validationImpl(project, url, configuration, passiveManager, false, null, null);
}
public static boolean interactiveValidation(final Project project, final SVNURL url, final String realm, final String kind) {
final SvnConfiguration configuration = SvnConfiguration.getInstance(project);
final ISVNAuthenticationManager passiveManager = configuration.getInteractiveManager(SvnVcs.getInstance(project));
return validationImpl(project, url, configuration, passiveManager, true, realm, kind);
}
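  /**
   * Validates credentials for the URL by issuing an 'info' request with the given
   * authentication manager. Returns false on authentication failure or cancellation;
   * when checkWrite is true and no interactive prompt was shown yet, asks the user
   * for credentials and acknowledges them on success.
   */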
private static boolean validationImpl(final Project project, final SVNURL url,
final SvnConfiguration configuration, final ISVNAuthenticationManager manager,
final boolean checkWrite, final String realm, final String kind/*, final boolean passive*/) {
SvnInteractiveAuthenticationProvider.clearCallState();
try {
new SVNWCClient(manager, configuration.getOptions(project)).doInfo(url, SVNRevision.UNDEFINED, SVNRevision.HEAD);
} catch (SVNAuthenticationException e) {
log(e);
return false;
} catch (SVNCancelException e) {
log(e); // auth canceled
return false;
} catch (SVNException e) {
if (e.getErrorMessage().getErrorCode().isAuthentication()) {
log(e);
return false;
}
LOG.info("some other exc", e);
}
if (! checkWrite) {
return true;
}
/*if (passive) {
return SvnInteractiveAuthenticationProvider.wasCalled();
}*/
if (SvnInteractiveAuthenticationProvider.wasCalled() && SvnInteractiveAuthenticationProvider.wasCancelled()) return false;
if (SvnInteractiveAuthenticationProvider.wasCalled()) return true;
final SvnVcs svnVcs = SvnVcs.getInstance(project);
final SvnInteractiveAuthenticationProvider provider = new SvnInteractiveAuthenticationProvider(svnVcs);
final SVNAuthentication svnAuthentication = provider.requestClientAuthentication(kind, url, realm, null, null, true);
if (svnAuthentication != null) {
configuration.acknowledge(kind, realm, svnAuthentication);
/*try {
configuration.getAuthenticationManager(svnVcs).acknowledgeAuthentication(true, kind, realm, null, svnAuthentication);
}
catch (SVNException e) {
LOG.info(e);
// acknowledge at least in runtime
configuration.acknowledge(kind, realm, svnAuthentication);
}*/
return true;
}
return false;
}
}
| plugins/svn4idea/src/org/jetbrains/idea/svn/SvnAuthenticationNotifier.java | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.impl.GenericNotifierImpl;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ThreeState;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.dialogs.SvnInteractiveAuthenticationProvider;
import org.tmatesoft.svn.core.SVNAuthenticationException;
import org.tmatesoft.svn.core.SVNCancelException;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager;
import org.tmatesoft.svn.core.auth.SVNAuthentication;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNWCClient;
public class SvnAuthenticationNotifier extends GenericNotifierImpl<SvnAuthenticationNotifier.AuthenticationRequest, SVNURL> {
private static final Logger LOG = Logger.getInstance("#org.jetbrains.idea.svn.SvnAuthenticationNotifier");
private static final String ourGroupId = "SubversionId";
private final SvnVcs myVcs;
private final RootsToWorkingCopies myRootsToWorkingCopies;
public SvnAuthenticationNotifier(final SvnVcs svnVcs) {
super(svnVcs.getProject(), ourGroupId, "Not Logged To Subversion", NotificationType.ERROR);
myVcs = svnVcs;
myRootsToWorkingCopies = myVcs.getRootsToWorkingCopies();
}
@Override
protected boolean ask(final AuthenticationRequest obj) {
final Ref<Boolean> resultRef = new Ref<Boolean>();
final boolean done = ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
public void run() {
final boolean result = interactiveValidation(obj.myProject, obj.getUrl(), obj.getRealm(), obj.getKind());
log("ask result for: " + obj.getUrl() + " is: " + result);
resultRef.set(result);
if (result) {
onStateChangedToSuccess(obj);
}
}
}, "Checking authorization state", true, myVcs.getProject());
return done && Boolean.TRUE.equals(resultRef.get());
}
private void onStateChangedToSuccess(final AuthenticationRequest obj) {
myVcs.invokeRefreshSvnRoots(false);
/*ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
myVcs.invokeRefreshSvnRoots(false);
}
});*/
/*final List<SVNURL> outdatedRequests = new LinkedList<SVNURL>();
final Collection<SVNURL> keys = getAllCurrentKeys();
for (SVNURL key : keys) {
final SVNURL commonURLAncestor = SVNURLUtil.getCommonURLAncestor(key, obj.getUrl());
if ((! StringUtil.isEmptyOrSpaces(commonURLAncestor.getHost())) && (! StringUtil.isEmptyOrSpaces(commonURLAncestor.getPath()))) {
final AuthenticationRequest currObj = getObj(key);
if ((currObj != null) && passiveValidation(myVcs.getProject(), key, true, currObj.getRealm(), currObj.getKind())) {
outdatedRequests.add(key);
}
}
}
log("on state changed ");
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
for (SVNURL key : outdatedRequests) {
removeLazyNotificationByKey(key);
}
}
}, ModalityState.NON_MODAL); */
}
@Override
public void ensureNotify(AuthenticationRequest obj) {
/*ChangesViewBalloonProblemNotifier.showMe(myVcs.getProject(), "You are not authenticated to '" + obj.getRealm() + "'." +
"To login, see pending notifications.", MessageType.ERROR);*/
super.ensureNotify(obj);
}
@NotNull
@Override
public SVNURL getKey(final AuthenticationRequest obj) {
// !!! wc's URL
return obj.getWcUrl();
}
@Nullable
public SVNURL getWcUrl(final AuthenticationRequest obj) {
if (obj.isOutsideCopies()) return null;
if (obj.getWcUrl() != null) return obj.getWcUrl();
final WorkingCopy copy = myRootsToWorkingCopies.getMatchingCopy(obj.getUrl());
if (copy != null) {
obj.setOutsideCopies(false);
obj.setWcUrl(copy.getUrl());
} else {
obj.setOutsideCopies(true);
}
return copy == null ? null : copy.getUrl();
}
/**
   * Based on the presence of notifications.
*/
public ThreeState isAuthenticatedFor(final VirtualFile vf) {
final WorkingCopy wcCopy = myRootsToWorkingCopies.getWcRoot(vf);
if (wcCopy == null) return ThreeState.UNSURE;
// check there's no cancellation yet
final boolean haveCancellation = getStateFor(wcCopy.getUrl());
if (! haveCancellation) return ThreeState.NO;
// check have credentials
return passiveValidation(myVcs.getProject(), wcCopy.getUrl()) ? ThreeState.YES : ThreeState.NO;
}
@NotNull
@Override
protected String getNotificationContent(AuthenticationRequest obj) {
return "<a href=\"\">Click to fix.</a> Not logged to Subversion '" + obj.getRealm() + "' (" + obj.getUrl().toDecodedString() + ")";
}
@NotNull
@Override
protected String getToString(AuthenticationRequest obj) {
return "Click to fix. Not logged to Subversion '" + obj.getRealm() + "' (" + obj.getUrl().toDecodedString() + ")";
}
public static class AuthenticationRequest {
private final Project myProject;
private final String myKind;
private final SVNURL myUrl;
private final String myRealm;
private SVNURL myWcUrl;
private boolean myOutsideCopies;
public AuthenticationRequest(Project project, String kind, SVNURL url, String realm) {
myProject = project;
myKind = kind;
myUrl = url;
myRealm = realm;
}
public boolean isOutsideCopies() {
return myOutsideCopies;
}
public void setOutsideCopies(boolean outsideCopies) {
myOutsideCopies = outsideCopies;
}
public SVNURL getWcUrl() {
return myWcUrl;
}
public void setWcUrl(SVNURL wcUrl) {
myWcUrl = wcUrl;
}
public String getKind() {
return myKind;
}
public SVNURL getUrl() {
return myUrl;
}
public String getRealm() {
return myRealm;
}
}
static void log(final Throwable t) {
LOG.debug(t);
}
static void log(final String s) {
LOG.debug(s);
}
public static boolean passiveValidation(final Project project, final SVNURL url) {
final SvnConfiguration configuration = SvnConfiguration.getInstance(project);
final ISVNAuthenticationManager passiveManager = configuration.getPassiveAuthenticationManager();
return validationImpl(project, url, configuration, passiveManager, false, null, null);
}
public static boolean interactiveValidation(final Project project, final SVNURL url, final String realm, final String kind) {
final SvnConfiguration configuration = SvnConfiguration.getInstance(project);
final ISVNAuthenticationManager passiveManager = configuration.getInteractiveManager(SvnVcs.getInstance(project));
return validationImpl(project, url, configuration, passiveManager, true, realm, kind);
}
private static boolean validationImpl(final Project project, final SVNURL url,
final SvnConfiguration configuration, final ISVNAuthenticationManager manager,
final boolean checkWrite, final String realm, final String kind/*, final boolean passive*/) {
SvnInteractiveAuthenticationProvider.clearCallState();
try {
new SVNWCClient(manager, configuration.getOptions(project)).doInfo(url, SVNRevision.UNDEFINED, SVNRevision.HEAD);
} catch (SVNAuthenticationException e) {
log(e);
return false;
} catch (SVNCancelException e) {
log(e); // auth canceled
return false;
} catch (SVNException e) {
if (e.getErrorMessage().getErrorCode().isAuthentication()) {
log(e);
return false;
}
LOG.info("some other exc", e);
}
if (! checkWrite) {
return true;
}
/*if (passive) {
return SvnInteractiveAuthenticationProvider.wasCalled();
}*/
if (SvnInteractiveAuthenticationProvider.wasCalled() && SvnInteractiveAuthenticationProvider.wasCancelled()) return false;
if (SvnInteractiveAuthenticationProvider.wasCalled()) return true;
final SvnVcs svnVcs = SvnVcs.getInstance(project);
final SvnInteractiveAuthenticationProvider provider = new SvnInteractiveAuthenticationProvider(svnVcs);
final SVNAuthentication svnAuthentication = provider.requestClientAuthentication(kind, url, realm, null, null, true);
if (svnAuthentication != null) {
configuration.acknowledge(kind, realm, svnAuthentication);
/*try {
configuration.getAuthenticationManager(svnVcs).acknowledgeAuthentication(true, kind, realm, null, svnAuthentication);
}
catch (SVNException e) {
LOG.info(e);
// acknowledge at least in runtime
configuration.acknowledge(kind, realm, svnAuthentication);
}*/
return true;
}
return false;
}
}
| IDEA-52507 (Subversion: Pending incoming changes aren't detected anymore) (1)
| plugins/svn4idea/src/org/jetbrains/idea/svn/SvnAuthenticationNotifier.java | IDEA-52507 (Subversion: Pending incoming changes aren't detected anymore) (1) |
|
Java | apache-2.0 | 912a680eebde601bd909249b6ac03c9787d80398 | 0 | gamerson/liferay-blade-cli,gamerson/liferay-blade-cli,gamerson/liferay-blade-cli | /**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.liferay.blade.gradle.tooling;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.ConfigurationContainer;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.PublishArtifactSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.ExtensionAware;
import org.gradle.api.plugins.ExtensionContainer;
import org.gradle.api.tasks.TaskOutputs;
import org.gradle.tooling.provider.model.ToolingModelBuilder;
import org.gradle.tooling.provider.model.ToolingModelBuilderRegistry;
/**
* @author Gregory Amerson
* @author Simon Jiang
*/
public class ProjectInfoPlugin implements Plugin<Project> {
@Inject
public ProjectInfoPlugin(ToolingModelBuilderRegistry toolingModelBuilderRegistry) {
_toolingModelBuilderRegistry = toolingModelBuilderRegistry;
}
@Override
public void apply(Project project) {
_toolingModelBuilderRegistry.register(new ProjectInfoBuilder());
}
private final ToolingModelBuilderRegistry _toolingModelBuilderRegistry;
private static class ProjectInfoBuilder implements ToolingModelBuilder {
@Override
public Object buildAll(String modelName, Project project) {
Set<String> pluginClassNames = new HashSet<>();
for (Plugin<?> plugin : project.getPlugins()) {
Class<?> clazz = plugin.getClass();
pluginClassNames.add(clazz.getName());
}
Set<Task> buildTasks = project.getTasksByName("build", true);
Set<Task> jarTasks = project.getTasksByName("jar", true);
Set<Task> tasks = new HashSet<>();
tasks.addAll(buildTasks);
tasks.addAll(jarTasks);
Map<String, Set<File>> projectOutputFiles = new HashMap<>();
for (Task task : tasks) {
Project taskProject = task.getProject();
String projectPath = taskProject.getPath();
TaskOutputs outputs = task.getOutputs();
FileCollection fileCollection = outputs.getFiles();
Set<File> files = fileCollection.getFiles();
Set<File> outputFiles = projectOutputFiles.computeIfAbsent(projectPath, p -> new HashSet<>());
outputFiles.addAll(files);
}
ConfigurationContainer configurations = project.getConfigurations();
String liferayHome = _getLiferayHome(project);
String deployDir = _getDeployDir(project);
String dockerImageLiferay = _getDockerImageLiferay(project);
String dockerImageId = _getDockerImageId(project);
String dockerContainerId = _getDockerContainerId(project);
try {
Configuration archivesConfiguration = configurations.getByName(Dependency.ARCHIVES_CONFIGURATION);
PublishArtifactSet artifacts = archivesConfiguration.getArtifacts();
FileCollection fileCollection = artifacts.getFiles();
Set<File> files = fileCollection.getFiles();
Set<File> outputFiles = projectOutputFiles.computeIfAbsent(project.getPath(), p -> new HashSet<>());
outputFiles.addAll(files);
}
catch (Exception e) {
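// ignored (added comment): if the "archives" configuration or its artifacts cannot be resolved,
// no additional output files are recorded for this project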
}
return new DefaultModel(
pluginClassNames, projectOutputFiles, deployDir, liferayHome, dockerImageLiferay, dockerImageId,
dockerContainerId);
}
@Override
public boolean canBuild(String modelName) {
return modelName.equals(ProjectInfo.class.getName());
}
private String _getDeployDir(Project project) {
return _getExtensionProperty(project, "liferay", "deployDir");
}
private String _getDockerContainerId(Project project) {
Project rootProject = project.getRootProject();
return _getExtensionProperty(
(ExtensionAware)rootProject.getGradle(), "liferayWorkspace", "dockerContainerId");
}
private String _getDockerImageId(Project project) {
Project rootProject = project.getRootProject();
return _getExtensionProperty((ExtensionAware)rootProject.getGradle(), "liferayWorkspace", "dockerImageId");
}
private String _getDockerImageLiferay(Project project) {
Project rootProject = project.getRootProject();
return _getExtensionProperty(
(ExtensionAware)rootProject.getGradle(), "liferayWorkspace", "dockerImageLiferay");
}
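// Descriptive note (added commentary): reads the named property reflectively, via JavaBeans
// introspection, from a Gradle extension found on either a Project or the Gradle object (both are
// ExtensionAware); returns null when the extension or the property is not present.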
private String _getExtensionProperty(ExtensionAware extensionAware, String extensionName, String property) {
ExtensionContainer extensionContainer = extensionAware.getExtensions();
Object extension = extensionContainer.findByName(extensionName);
String returnVal = null;
if (extension != null) {
Class<?> clazz = extension.getClass();
try {
BeanInfo beanInfo = Introspector.getBeanInfo(clazz);
for (PropertyDescriptor propertyDescriptor : beanInfo.getPropertyDescriptors()) {
String propertyDescriptorName = propertyDescriptor.getName();
Method method = propertyDescriptor.getReadMethod();
if ((method != null) && property.equals(propertyDescriptorName)) {
Object value = method.invoke(extension);
returnVal = String.valueOf(value);
}
}
}
catch (Exception e) {
}
}
return returnVal;
}
private String _getLiferayHome(Project project) {
return _getExtensionProperty(project, "liferay", "liferayHome");
}
}
} | gradle-tooling/src/main/java/com/liferay/blade/gradle/tooling/ProjectInfoPlugin.java | /**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.liferay.blade.gradle.tooling;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.ConfigurationContainer;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.PublishArtifactSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.ExtensionAware;
import org.gradle.api.plugins.ExtensionContainer;
import org.gradle.api.tasks.TaskOutputs;
import org.gradle.tooling.provider.model.ToolingModelBuilder;
import org.gradle.tooling.provider.model.ToolingModelBuilderRegistry;
/**
* @author Gregory Amerson
* @author Simon Jiang
*/
public class ProjectInfoPlugin implements Plugin<Project> {
@Inject
public ProjectInfoPlugin(ToolingModelBuilderRegistry toolingModelBuilderRegistry) {
_toolingModelBuilderRegistry = toolingModelBuilderRegistry;
}
@Override
public void apply(Project project) {
_toolingModelBuilderRegistry.register(new ProjectInfoBuilder());
}
private final ToolingModelBuilderRegistry _toolingModelBuilderRegistry;
private static class ProjectInfoBuilder implements ToolingModelBuilder {
@Override
public Object buildAll(String modelName, Project project) {
Set<String> pluginClassNames = new HashSet<>();
for (Plugin<?> plugin : project.getPlugins()) {
Class<?> clazz = plugin.getClass();
pluginClassNames.add(clazz.getName());
}
Set<Task> buildTasks = project.getTasksByName("build", true);
Set<Task> jarTasks = project.getTasksByName("jar", true);
Set<Task> tasks = new HashSet<>();
tasks.addAll(buildTasks);
tasks.addAll(jarTasks);
Map<String, Set<File>> projectOutputFiles = new HashMap<>();
for (Task task : tasks) {
Project taskProject = task.getProject();
String projectPath = taskProject.getPath();
TaskOutputs outputs = task.getOutputs();
FileCollection fileCollection = outputs.getFiles();
Set<File> files = fileCollection.getFiles();
Set<File> outputFiles = projectOutputFiles.computeIfAbsent(projectPath, p -> new HashSet<>());
outputFiles.addAll(files);
}
ConfigurationContainer configurations = project.getConfigurations();
String liferayHome = _getLiferayHome(project);
String deployDir = _getDeployDir(project);
String dockerImageLiferay = _getDockerImageLiferay(project);
String dockerImageId = _getDockerImageId(project);
String dockerContainerId = _getDockerContainerId(project);
try {
Configuration archivesConfiguration = configurations.getByName(Dependency.ARCHIVES_CONFIGURATION);
PublishArtifactSet artifacts = archivesConfiguration.getArtifacts();
FileCollection fileCollection = artifacts.getFiles();
Set<File> files = fileCollection.getFiles();
Set<File> outputFiles = projectOutputFiles.computeIfAbsent(project.getPath(), p -> new HashSet<>());
outputFiles.addAll(files);
}
catch (Exception e) {
}
return new DefaultModel(
pluginClassNames, projectOutputFiles, deployDir, liferayHome, dockerImageLiferay, dockerImageId,
dockerContainerId);
}
@Override
public boolean canBuild(String modelName) {
return modelName.equals(ProjectInfo.class.getName());
}
private String _getDeployDir(Project project) {
return _getExtensionProperty(project, "liferay", "deployDir");
}
private String _getDockerContainerId(Project project) {
return _getExtensionProperty(project, "liferayWorkspace", "dockerContainerId");
}
private String _getDockerImageId(Project project) {
return _getExtensionProperty(project, "liferayWorkspace", "dockerImageId");
}
private String _getDockerImageLiferay(Project project) {
return _getExtensionProperty(project, "liferayWorkspace", "dockerImageLiferay");
}
private String _getExtensionProperty(Project project, String extension, String property) {
ExtensionContainer extensionContainer = project.getExtensions();
Object liferayExtension = extensionContainer.findByName(extension);
if (project.equals(project.getRootProject())) {
ExtensionAware extensionAware = (ExtensionAware)project.getGradle();
ExtensionContainer rootExtensionContainer = extensionAware.getExtensions();
liferayExtension = rootExtensionContainer.findByName("liferayWorkspace");
}
String liferayHome = null;
if (liferayExtension != null) {
Class<?> clazz = liferayExtension.getClass();
try {
BeanInfo beanInfo = Introspector.getBeanInfo(clazz);
for (PropertyDescriptor propertyDescriptor : beanInfo.getPropertyDescriptors()) {
String propertyDescriptorName = propertyDescriptor.getName();
Method method = propertyDescriptor.getReadMethod();
if ((method != null) && property.equals(propertyDescriptorName)) {
Object value = method.invoke(liferayExtension);
liferayHome = String.valueOf(value);
}
}
}
catch (Exception e) {
}
}
return liferayHome;
}
private String _getLiferayHome(Project project) {
return _getExtensionProperty(project, "liferay", "liferayHome");
}
}
} | BLADE-487 load extensions from gradle or project objects
| gradle-tooling/src/main/java/com/liferay/blade/gradle/tooling/ProjectInfoPlugin.java | BLADE-487 load extensions from gradle or project objects |
|
Java | apache-2.0 | df87b152a43e535c63cfb587adf2a60ed0937d75 | 0 | 243826/japicmp,siom79/japicmp,eldur/japicmp,siom79/japicmp,eldur/japicmp,siom79/japicmp,eldur/japicmp | package japicmp.model;
import com.google.common.base.Optional;
import japicmp.exception.JApiCmpException;
import japicmp.util.ModifierHelper;
import javassist.*;
import java.io.Externalizable;
import java.io.Serializable;
import java.util.List;
public class JavaObjectSerializationCompatibility {
public static final String SERIAL_VERSION_UID = "serialVersionUID";
public void evaluate(List<JApiClass> classes) {
for (JApiClass jApiClass : classes) {
JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus changeStatus = computeChangeStatus(jApiClass);
jApiClass.setJavaObjectSerializationCompatible(changeStatus);
}
}
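// Descriptive note (added commentary): computeChangeStatus compares the declared and default
// serialVersionUIDs of the old and new class versions; equal declared UIDs with differing defaults
// trigger the detailed checkChanges inspection to distinguish "compatible" from
// "incompatible but SUID equal".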
private JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus computeChangeStatus(JApiClass jApiClass) {
JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.NOT_SERIALIZABLE;
long serialVersionUidOld = -1L;
long serialVersionUidOldDefault = -1L;
long serialVersionUidNew = -1L;
long serialVersionUidNewDefault = -1L;
boolean serializable = false;
Optional<CtClass> oldClassOptional = jApiClass.getOldClass();
if (oldClassOptional.isPresent()) {
CtClass ctClass = oldClassOptional.get();
SerialVersionUidResult serialVersionUidResult = new SerialVersionUidResult(serialVersionUidOld, serialVersionUidOldDefault, ctClass).invoke();
serializable = serialVersionUidResult.isSerializable();
serialVersionUidOld = serialVersionUidResult.getSerialVersionUid();
serialVersionUidOldDefault = serialVersionUidResult.getSerialVersionUidDefault();
}
Optional<CtClass> newClassOptional = jApiClass.getNewClass();
if (newClassOptional.isPresent()) {
CtClass ctClass = newClassOptional.get();
SerialVersionUidResult serialVersionUidResult = new SerialVersionUidResult(serialVersionUidNew, serialVersionUidNewDefault, ctClass).invoke();
serializable = serialVersionUidResult.isSerializable();
serialVersionUidNew = serialVersionUidResult.getSerialVersionUid();
serialVersionUidNewDefault = serialVersionUidResult.getSerialVersionUidDefault();
}
if (serializable) {
if (serialVersionUidOld == serialVersionUidNew) {
if (serialVersionUidOldDefault != serialVersionUidNewDefault) {
JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus checkChanges = checkChanges(jApiClass);
if (checkChanges == JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE_BUT_SUID_EQUAL;
} else {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_COMPATIBLE;
}
} else {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_COMPATIBLE;
}
} else {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
}
return state;
}
/**
* Checks compatibility of changes according to http://docs.oracle.com/javase/7/docs/platform/serialization/spec/version.html#5172.
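* For example (added commentary, mirroring the checks below): removing a field, adding a static or
* transient modifier to a field, changing a field's type, or removing Serializable/Externalizable
* (or swapping one for the other) is reported as incompatible.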
* @param jApiClass the class to check
* @return either SERIALIZABLE_INCOMPATIBLE or SERIALIZABLE_COMPATIBLE
*/
private JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus checkChanges(JApiClass jApiClass) {
JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_COMPATIBLE;
for (JApiField field : jApiClass.getFields()) {
if (field.getChangeStatus() == JApiChangeStatus.REMOVED) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
if (field.getStaticModifier().getChangeStatus() == JApiChangeStatus.NEW) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
if (field.getTransientModifier().getChangeStatus() == JApiChangeStatus.NEW) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
if (field.getType().getChangeStatus() == JApiChangeStatus.MODIFIED) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
}
boolean serializableAdded = false;
boolean serializableRemoved = false;
boolean externalizableAdded = false;
boolean externalizableRemoved = false;
for (JApiImplementedInterface implementedInterface : jApiClass.getInterfaces()) {
if (Serializable.class.getCanonicalName().equals(implementedInterface.getFullyQualifiedName())) {
if (implementedInterface.getChangeStatus() == JApiChangeStatus.NEW) {
serializableAdded = true;
} else if (implementedInterface.getChangeStatus() == JApiChangeStatus.REMOVED) {
serializableRemoved = true;
}
}
if (Externalizable.class.getCanonicalName().equals(implementedInterface.getFullyQualifiedName())) {
if (implementedInterface.getChangeStatus() == JApiChangeStatus.NEW) {
externalizableAdded = true;
} else if (implementedInterface.getChangeStatus() == JApiChangeStatus.REMOVED) {
externalizableRemoved = true;
}
}
}
if (serializableRemoved && externalizableAdded) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
if (serializableAdded && externalizableRemoved) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
if (serializableRemoved) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
if (externalizableRemoved) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
//TODO: type of class changes
return state;
}
private boolean isCtClassSerializable(CtClass clazz) {
ClassPool pool = clazz.getClassPool();
try {
return clazz.subtypeOf(pool.get("java.io.Serializable"));
} catch (NotFoundException e) {
throw new JApiCmpException(JApiCmpException.Reason.ClassLoading, "Failed to determine whether the class '" + clazz.getName() + "' is serializable: " + e.getMessage(), e);
}
}
private class SerialVersionUidResult {
private long serialVersionUid;
private long serialVersionUidDefault;
private boolean serializable;
private CtClass ctClass;
public SerialVersionUidResult(long serialVersionUid, long serialVersionUidDefault, CtClass ctClass) {
this.serialVersionUid = serialVersionUid;
this.serialVersionUidDefault = serialVersionUidDefault;
this.ctClass = ctClass;
}
public long getSerialVersionUid() {
return serialVersionUid;
}
public long getSerialVersionUidDefault() {
return serialVersionUidDefault;
}
public boolean isSerializable() {
return serializable;
}
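// Descriptive note (added commentary): invoke() records the declared serialVersionUID, if any, and
// also the default UID that javassist's SerialVersionUID.setSerialVersionUID would generate,
// temporarily removing and re-adding the declared field so the class ends up unchanged.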
public SerialVersionUidResult invoke() {
if (isCtClassSerializable(ctClass)) {
serializable = true;
try {
CtField declaredField = ctClass.getDeclaredField(SERIAL_VERSION_UID);
Object constantValue = declaredField.getConstantValue();
if (constantValue instanceof Long) {
serialVersionUid = (Long)constantValue;
}
} catch (NotFoundException e) {
try {
SerialVersionUID.setSerialVersionUID(ctClass);
CtField declaredField = ctClass.getDeclaredField(SERIAL_VERSION_UID);
Object constantValue = declaredField.getConstantValue();
if (constantValue instanceof Long) {
serialVersionUid = (Long)constantValue;
serialVersionUidDefault = serialVersionUid;
}
ctClass.removeField(declaredField);
} catch (Exception ignored) {}
}
if (serialVersionUidDefault == -1L) {
try {
CtField declaredFieldOriginal = ctClass.getDeclaredField(SERIAL_VERSION_UID);
ctClass.removeField(declaredFieldOriginal);
SerialVersionUID.setSerialVersionUID(ctClass);
CtField declaredField = ctClass.getDeclaredField(SERIAL_VERSION_UID);
Object constantValue = declaredField.getConstantValue();
if (constantValue instanceof Long) {
serialVersionUidDefault = (Long)constantValue;
}
ctClass.removeField(declaredField);
ctClass.addField(declaredFieldOriginal);
} catch (Exception ignored) {}
}
}
return this;
}
}
}
| japicmp/src/main/java/japicmp/model/JavaObjectSerializationCompatibility.java | package japicmp.model;
import com.google.common.base.Optional;
import japicmp.exception.JApiCmpException;
import javassist.*;
import java.util.List;
public class JavaObjectSerializationCompatibility {
public static final String SERIAL_VERSION_UID = "serialVersionUID";
public void evaluate(List<JApiClass> classes) {
for (JApiClass jApiClass : classes) {
JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus changeStatus = computeChangeStatus(jApiClass);
jApiClass.setJavaObjectSerializationCompatible(changeStatus);
}
}
private JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus computeChangeStatus(JApiClass jApiClass) {
JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.NOT_SERIALIZABLE;
long serialVersionUidOld = -1L;
long serialVersionUidOldDefault = -1L;
long serialVersionUidNew = -1L;
long serialVersionUidNewDefault = -1L;
boolean serializable = false;
Optional<CtClass> oldClassOptional = jApiClass.getOldClass();
if (oldClassOptional.isPresent()) {
CtClass ctClass = oldClassOptional.get();
SerialVersionUidResult serialVersionUidResult = new SerialVersionUidResult(serialVersionUidOld, serialVersionUidOldDefault, ctClass).invoke();
serializable = serialVersionUidResult.isSerializable();
serialVersionUidOld = serialVersionUidResult.getSerialVersionUid();
serialVersionUidOldDefault = serialVersionUidResult.getSerialVersionUidDefault();
}
Optional<CtClass> newClassOptional = jApiClass.getNewClass();
if (newClassOptional.isPresent()) {
CtClass ctClass = newClassOptional.get();
SerialVersionUidResult serialVersionUidResult = new SerialVersionUidResult(serialVersionUidNew, serialVersionUidNewDefault, ctClass).invoke();
serializable = serialVersionUidResult.isSerializable();
serialVersionUidNew = serialVersionUidResult.getSerialVersionUid();
serialVersionUidNewDefault = serialVersionUidResult.getSerialVersionUidDefault();
}
if (serializable) {
if (serialVersionUidOld == serialVersionUidNew) {
if (serialVersionUidOldDefault != serialVersionUidNewDefault) {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE_BUT_SUID_EQUAL;
} else {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_COMPATIBLE;
}
} else {
state = JApiJavaObjectSerializationCompatibility.JApiJavaObjectSerializationChangeStatus.SERIALIZABLE_INCOMPATIBLE;
}
}
return state;
}
private boolean isCtClassSerializable(CtClass clazz) {
ClassPool pool = clazz.getClassPool();
try {
return clazz.subtypeOf(pool.get("java.io.Serializable"));
} catch (NotFoundException e) {
throw new JApiCmpException(JApiCmpException.Reason.ClassLoading, "Failed to determine whether the class '" + clazz.getName() + "' is serializable: " + e.getMessage(), e);
}
}
private class SerialVersionUidResult {
private long serialVersionUid;
private long serialVersionUidDefault;
private boolean serializable;
private CtClass ctClass;
public SerialVersionUidResult(long serialVersionUid, long serialVersionUidDefault, CtClass ctClass) {
this.serialVersionUid = serialVersionUid;
this.serialVersionUidDefault = serialVersionUidDefault;
this.ctClass = ctClass;
}
public long getSerialVersionUid() {
return serialVersionUid;
}
public long getSerialVersionUidDefault() {
return serialVersionUidDefault;
}
public boolean isSerializable() {
return serializable;
}
public SerialVersionUidResult invoke() {
if (isCtClassSerializable(ctClass)) {
serializable = true;
try {
CtField declaredField = ctClass.getDeclaredField(SERIAL_VERSION_UID);
Object constantValue = declaredField.getConstantValue();
if (constantValue instanceof Long) {
serialVersionUid = (Long)constantValue;
}
} catch (NotFoundException e) {
try {
SerialVersionUID.setSerialVersionUID(ctClass);
CtField declaredField = ctClass.getDeclaredField(SERIAL_VERSION_UID);
Object constantValue = declaredField.getConstantValue();
if (constantValue instanceof Long) {
serialVersionUid = (Long)constantValue;
serialVersionUidDefault = serialVersionUid;
}
ctClass.removeField(declaredField);
} catch (Exception ignored) {}
}
if (serialVersionUidDefault == -1L) {
try {
CtField declaredFieldOriginal = ctClass.getDeclaredField(SERIAL_VERSION_UID);
ctClass.removeField(declaredFieldOriginal);
SerialVersionUID.setSerialVersionUID(ctClass);
CtField declaredField = ctClass.getDeclaredField(SERIAL_VERSION_UID);
Object constantValue = declaredField.getConstantValue();
if (constantValue instanceof Long) {
serialVersionUidDefault = (Long)constantValue;
}
ctClass.removeField(declaredField);
ctClass.addField(declaredFieldOriginal);
} catch (Exception ignored) {}
}
}
return this;
}
}
}
| added check of compatibility of changes of serializable class according to http://docs.oracle.com/javase/7/docs/platform/serialization/spec/version.html#5172
| japicmp/src/main/java/japicmp/model/JavaObjectSerializationCompatibility.java | added check of compatibility of changes of serializable class according to http://docs.oracle.com/javase/7/docs/platform/serialization/spec/version.html#5172 |
|
Java | apache-2.0 | ce516fb1c612727d092901e0bb7c65acaed3b305 | 0 | apache/uima-uimaj,apache/uima-uimaj,apache/uima-uimaj,apache/uima-uimaj,apache/uima-uimaj | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.migratev3.jcas;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.io.Writer;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.Charset;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import javax.tools.Diagnostic;
import javax.tools.DiagnosticCollector;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import org.apache.uima.UIMARuntimeException;
import org.apache.uima.cas.impl.TypeImpl;
import org.apache.uima.cas.impl.TypeSystemImpl;
import org.apache.uima.cas.impl.UimaDecompiler;
import org.apache.uima.internal.util.CommandLineParser;
import org.apache.uima.internal.util.Misc;
import org.apache.uima.internal.util.UIMAClassLoader;
import org.apache.uima.internal.util.function.Runnable_withException;
import org.apache.uima.pear.tools.PackageBrowser;
import org.apache.uima.pear.tools.PackageInstaller;
import org.apache.uima.util.FileUtils;
import com.github.javaparser.JavaParser;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.ImportDeclaration;
import com.github.javaparser.ast.Modifier;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.PackageDeclaration;
import com.github.javaparser.ast.body.AnnotationDeclaration;
import com.github.javaparser.ast.body.BodyDeclaration;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.ConstructorDeclaration;
import com.github.javaparser.ast.body.EnumDeclaration;
import com.github.javaparser.ast.body.FieldDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.body.Parameter;
import com.github.javaparser.ast.body.TypeDeclaration;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.comments.Comment;
import com.github.javaparser.ast.expr.AssignExpr;
import com.github.javaparser.ast.expr.BinaryExpr;
import com.github.javaparser.ast.expr.CastExpr;
import com.github.javaparser.ast.expr.EnclosedExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.FieldAccessExpr;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.Name;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.NullLiteralExpr;
import com.github.javaparser.ast.expr.ObjectCreationExpr;
import com.github.javaparser.ast.expr.SimpleName;
import com.github.javaparser.ast.expr.StringLiteralExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.EmptyStmt;
import com.github.javaparser.ast.stmt.ExplicitConstructorInvocationStmt;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import com.github.javaparser.ast.stmt.IfStmt;
import com.github.javaparser.ast.stmt.ReturnStmt;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
import com.github.javaparser.ast.type.PrimitiveType;
import com.github.javaparser.ast.type.Type;
import com.github.javaparser.ast.visitor.VoidVisitorAdapter;
import com.github.javaparser.printer.PrettyPrinter;
import com.github.javaparser.printer.PrettyPrinterConfiguration;
/**
* <p>
* A driver that scans given roots for source and/or class Java files that contain JCas classes
*
* <ul>
* <li>identifies which ones appear to be JCas classes (heuristic)
* <ul>
* <li>of these, identifies which ones appear to be v2
* <ul>
* <li>converts these to v3</li>
* </ul>
* </li>
* </ul>
*
* <li>also can receive a list of individual class names</li>
* <li>also can do a single source file</li>
* </ul>
*
* <p>
* Creates summary and detailed reports of its actions.
*
* <p>
* Files representing JCas classes to convert are discovered by walking file system directories from
 * various roots, specified as input. The tool operates in one of two exclusive "modes": migrating
* from sources (e.g., .java files) and migrating using compiled classes.
*
* <p>
* Compiled classes are decompiled and then migrated. This decompilation step usually requires a
* java classpath, which is supplied using the -migrateClasspath parameter. Exception: migrating
* PEAR files, which contain their own specification for a classpath.
*
* <p>
* The same JCas class may be encountered multiple times while walking the directory tree from the
* roots, with the same or different definition. All of these definitions are migrated.
*
* <p>
* Copies of the original and the converted files are put into the output file tree.
*
* <p>
* Directory structure, starting at -outputDirectory (which if not specified, is a new temp
* directory). The "a0", "a1" represent non-identical alternative definitions for the same class.
*
* <pre>
* converted/
* v2/ these are the decompiled or "found" source files
* a0/x/y/z/javapath/.../Classname.java root-id + fully qualified java class + package as slashified name
* /Classname2.java etc.
* a1/x/y/z/javapath/.../Classname.java if there are different root-ids
* ...
* v3/
* a0/x/y/z/javapath/.../Classname.java fully qualified java class + package as slashified name
* /Classname2.java etc.
* a1/x/y/z/javapath/.../Classname.java if there are different root-ids
* ...
*
* v3-classes - the compiled form if from classes and a java compiler was available
* The first directory is the id of the Jar or PEAR container.
* The second directory is the alternative.
*
* 23/a0/fully/slashified/package/class-name.class << parallel structure as v3/
*
* jars/ - copies of the original JARs with the converted JCas classes
* The first directory is the id of the Jar or PEAR container
* 7/jar-file-name-last-part.jar
* 12/jar-file-name-last-part.jar
* 14/ etc.
*
* pears - copies of the original PEARs with the converted JCas classes, if there were no duplicates
* 8/pear-file-name-last-art.pear
* 9/ etc.
*
* not-converted/ (skipped)
* logs/
* jar-map.txt list of index to paths
* pear-map.txt list of index to paths
* processed.txt
* duplicates.txt
* builtinsNotExtended.txt
* failed.txt
* skippedBuiltins.txt
* nonJCasFiles.txt
 *         workaroundDir.txt
* deletedCheckModified.txt
* manualInspection.txt
* pearFileUpdates.txt
* jarFileUpdates.txt
* ...
* </pre>
*
* <p>
* Operates in one of two modes:
*
* <pre>
* Mode 1: Given classes-roots and/or individual class names, and a migrateClasspath,
 *        scans the classes-roots looking for class candidates
* - determines the class name,
* - decompiles that
* - migrates that decompiled source.
*
* if a Java compiler (JDK) is available,
* - compiles the results
* - does reassembly for Jars and PEARs, replacing the JCas classes.
*
* Mode 2: Given sources-roots or a single source java file
 *        scans the sources-roots looking for candidates
* - migrates that decompiled source.
* </pre>
*
* <p>
* Note: Each run clears the output directory before starting the migration.
*
* <p>
* Note: classpath may be specified using -migrateClassPath or as the class path used to run this
* tool.
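 *
 * <p>
 * A hypothetical invocation sketch (added commentary; the classpath and directory paths below are
 * illustrative only, while the parameter names are the ones this class defines):
 *
 * <pre>
 *   java -cp tool-and-dependency-jars org.apache.uima.migratev3.jcas.MigrateJCas \
 *        -classesRoots /path/to/v2-jcas-jars \
 *        -migrateClasspath /path/to/type-system-dependencies \
 *        -outputDirectory /tmp/jcas-v3-migration
 * </pre>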
*/
public class MigrateJCas extends VoidVisitorAdapter<Object> {
// @formatter:off
/*
* ***************************************************** Internals
*
* Unique IDs of v2 and v3 artifacts: RootId + classname
*
* RootIdContainers (Set<RootId>) hold all discovered rootIds, at each Jar/Pear nesting level
* including outer level (no Jar/Pear). These are kept in a push-down stack
*
*
* Processing roots collection: done for source or class - iterate, for all roots --
* processCollection for candidates rooted at that root --- candidate is .java or .class, with
* path, with pearClasspath string ---- migrate called on each candidate ----- check to see if
* already done, and if so, skip. ------ means: same byte or source code associated with same fqcn
*
* Root-ids: created for each unique pathpart in front of fully-qualified class name created for
* each unique path to Jar or PEAR
*
* Caching to speed up duplicate processing: - decompiling: if the byte[] is already done, use
* other value (if augmented migrateClasspath is the same) - source-migrating: if the source
* strings are the same.
*
* Multiple sources for single class: classname2multiSources: TreeMap from fqcn to CommonConverted
* (string or bytes) CommonConverted: supports multiple paths having identical string/bytes.
*
* Unique IDs of v2 and v3 artifacts:
* RootId + classname
*
* RootIdContainers (Set<RootId>) hold all discovered rootIds, at each Jar/Pear nesting level
* including outer level (no Jar/Pear).
* These are kept in a push-down stack
*
*
* Processing roots collection: done for source or class
* - iterate, for all roots
* -- processCollection for candidates rooted at that root
* --- candidate is .java or .class, with path, with pearClasspath string
* ---- migrate called on each candidate
* ----- check to see if already done, and if so, skip.
* ------ means: same byte or source code associated with same fqcn
*
* Root-ids: created for each unique pathpart in front of fully-qualified class name
* created for each unique path to Jar or PEAR
*
* Caching to speed up duplicate processing:
* - decompiling: if the byte[] is already done, use other value (if augmented migrateClasspath is the same)
* - source-migrating: if the source strings are the same.
*
* Multiple sources for single class:
* classname2multiSources: TreeMap from fqcn to CommonConverted (string or bytes)
* CommonConverted: supports multiple paths having identical string/bytes.
*
* Compiling: driven from c2ps array of fqcn, path
* - may have multiple entries for same fqcn, with different paths,
* -- only if different values for the impl
* - set when visiting top-level compilation unit non-built-in type
*
*/
// @formatter:on
/** manage the indentation of the printing routines */
private static final int[] indent = new int[1];
private static StringBuilder si(StringBuilder sb) {
return Misc.indent(sb, indent);
}
private static StringBuilder flush(StringBuilder sb) {
System.out.print(sb);
sb.setLength(0);
return sb;
}
private static final Integer INTEGER0 = 0;
private static int nextContainerId = 0;
// @formatter:off
/******************************************************************
* Container - exists in tree structure, has super, sub containers
* -- subcontainers: has path to it
* - holds set of rootIds in that container
* - topmost one has null parent, and null pathToJarOrPear
******************************************************************/
// @formatter:on
private static class Container implements Comparable<Container> {
final int id = nextContainerId++;
final Container parent; // null if at top level
// @formatter:off
/** root to scan from.
* Pears: is the loc in temp space of installed pear
* Jars: is the file system mounted on the Jar
* -- for inner Jars, the Jar is copied out into temp space. */
// @formatter:on
Path root;
final Path rootOrig; // for Jars and Pears, the original path ending in jar or pear
final Set<Container> subContainers = new TreeSet<>(); // tree set for better ordering
final List<Path> candidates = new ArrayList<>();
final List<CommonConverted> convertedItems = new ArrayList<>();
final List<V3CompiledPathAndContainerItemPath> v3CompiledPathAndContainerItemPath = new ArrayList<>();
final boolean isPear;
final boolean isJar;
final boolean isSingleJavaSource;
/** can't use Path as the type, because the equals for Path is object == */
final Set<String> _Types = new HashSet<>(); // has the non_Type path only if the _Type is found
boolean haveDifferentCapitalizedNamesCollidingOnWindows = false;
String pearClasspath; // not final - set by subroutine after defaulting
/**
 * Cache of already done compiled classes, to avoid redoing them. Kept per container, because the
 * classpath could change the decompilation.
*/
private Map<byte[], CommonConverted> origBytesToCommonConverted = new HashMap<>();
Container(Container parent, Path root) {
this.parent = parent;
if (parent != null) {
parent.subContainers.add(this);
this.pearClasspath = parent.pearClasspath; // default, when expanding Jars.
}
this.rootOrig = root;
String s = root.toString().toLowerCase();
isJar = s.endsWith(".jar");
isPear = s.endsWith(".pear");
isSingleJavaSource = s.endsWith(".java");
this.root = (isPear || isJar) ? installJarOrPear() : root;
// // debug
// if (!isPear && isJar) {
// System.out.println("debug prepare jar: " + this);
// }
}
/**
 * Called when a new Jar or PEAR container is created: copies a nested archive out to temp space if
 * needed, installs a PEAR to compute its classpath, and mounts the archive as a file system.
 *
 * @return the install directory (the root of the mounted Jar/PEAR file system)
*/
private Path installJarOrPear() {
try {
Path theJarOrPear = rootOrig;
if (!theJarOrPear.getFileSystem().equals(FileSystems.getDefault())) {
// pear is embedded in another pear or jar, so copy the Jar (intact) to a temp spot so
// it's no longer embedded
theJarOrPear = getTempOutputPathForJarOrPear(theJarOrPear);
Files.copy(rootOrig, theJarOrPear, StandardCopyOption.REPLACE_EXISTING);
}
if (isPear) {
// extract the pear just to get the classpath
File pearInstallDir = Files.createTempDirectory(getTempDir(), "installedPear").toFile();
PackageBrowser ip = PackageInstaller.installPackage(pearInstallDir, rootOrig.toFile(),
false);
String newClasspath = ip.buildComponentClassPath();
String parentClasspath = parent.pearClasspath;
this.pearClasspath = (null == parentClasspath || 0 == parentClasspath.length())
? newClasspath
: newClasspath + File.pathSeparator + parentClasspath;
}
FileSystem pfs = FileSystems.newFileSystem(theJarOrPear, (ClassLoader) null);
return pfs.getPath("/");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = toString1();
indent[0] += 2;
try {
si(sb); // new line + indent
sb.append("subContainers=");
Misc.addElementsToStringBuilder(indent, sb, Misc.setAsList(subContainers), -1,
(sbx, i) -> sbx.append(i.id)).append(',');
si(sb).append("paths migrated="); // new line + indent
Misc.addElementsToStringBuilder(indent, sb, candidates, -1, StringBuilder::append)
.append(',');
// si(sb).append("v3CompilePath="); // new line + indent
// Misc.addElementsToStringBuilder(indent, sb, v3CompiledPathAndContainerItemPath, 100,
// StringBuilder::append);
} finally {
indent[0] -= 2;
si(sb).append(']');
}
return sb.toString();
}
public StringBuilder toString1() {
StringBuilder sb = new StringBuilder();
si(sb); // initial nl and indentation
sb.append(isJar ? "Jar " : isPear ? "PEAR " : "");
sb.append("container [id=").append(id).append(", parent.id=")
.append((null == parent) ? "null" : parent.id).append(", root or pathToJarOrPear=")
.append(rootOrig).append(',');
return sb;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return 31 * id;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Container other = (Container) obj;
if (id != other.id)
return false;
return true;
}
@Override
public int compareTo(Container o) {
return Integer.compare(id, o.id);
}
}
/**
* A path to a .java or .class file in some container, for the v2 version For Jars and Pears, the
* path is relative to the zip "/" dir
*/
private static class ContainerAndPath implements Comparable<ContainerAndPath> {
final Path path;
final Container container;
ContainerAndPath(Path path, Container container) {
this.path = path;
this.container = container;
}
/*
* (non-Javadoc)
*
* @see java.lang.Comparable#compareTo(java.lang.Object)
*/
@Override
public int compareTo(ContainerAndPath o) {
int r = path.compareTo(o.path);
if (r != 0) {
return r;
}
return Integer.compare(container.id, o.container.id);
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("ContainerAndPath [path=").append(path).append(", container=").append(container.id)
.append("]");
return sb.toString();
}
}
// @formatter:off
/**
* This class holds information used to replace compiled items in Jars and Pears.
*
 * a pair of the v3CompiledPath (which is the container nbr/a0/ + the slashified package-class-name + ".class")
* and the Container origRoot up to the start of the package and class name
* for the item being compiled.
* - Note: if a Jar has the same compiled class at multiple nesting levels, each one will have
* an instance of this class
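 * - Example (added commentary; the ids and names are illustrative): v3CompiledPath might point at
 *   converted/v3-classes/23/a0/org/example/MyType.class under the output directory, while
 *   pathInContainer names the corresponding .class entry inside the copied Jar or PEAR file system
 *   that postProcessPearOrJar overwrites.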
*/
// @formatter:on
private static class V3CompiledPathAndContainerItemPath {
final Path v3CompiledPath;
final String pathInContainer;
public V3CompiledPathAndContainerItemPath(Path v3CompiledPath, String pathInContainer) {
this.v3CompiledPath = v3CompiledPath;
this.pathInContainer = pathInContainer;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
si(sb).append("v3CompiledPathAndContainerPartPath [");
indent[0] += 2;
try {
si(sb).append("v3CompiledPath=").append(v3CompiledPath);
si(sb).append("pathInContainer=").append(pathInContainer);
} finally {
indent[0] -= 2;
si(sb).append("]");
}
return sb.toString();
}
}
private static final JavaCompiler javaCompiler = ToolProvider.getSystemJavaCompiler();
/****************************************************************
* Command line parameters
****************************************************************/
private static final String SOURCE_FILE_ROOTS = "-sourcesRoots";
private static final String CLASS_FILE_ROOTS = "-classesRoots";
private static final String OUTPUT_DIRECTORY = "-outputDirectory";
// private static final String SKIP_TYPE_CHECK = "-skipTypeCheck";
private static final String MIGRATE_CLASSPATH = "-migrateClasspath";
// private static final String CLASSES = "-classes"; // individual classes to migrate, get from
// supplied classpath
private static final Type intType = PrimitiveType.intType();
private static final Type callSiteType = JavaParser.parseType("CallSite");
private static final Type methodHandleType = JavaParser.parseType("MethodHandle");
private static final Type stringType = JavaParser.parseType("String");
private static final EnumSet<Modifier> public_static_final = EnumSet.of(Modifier.PUBLIC,
Modifier.STATIC, Modifier.FINAL);
private static final EnumSet<Modifier> private_static_final = EnumSet.of(Modifier.PRIVATE,
Modifier.STATIC, Modifier.FINAL);
private static final PrettyPrinterConfiguration printWithoutComments = new PrettyPrinterConfiguration();
static {
printWithoutComments.setPrintComments(false);
}
private static final PrettyPrinterConfiguration printCu = new PrettyPrinterConfiguration();
static {
printCu.setIndent(" ");
}
private static final String ERROR_DECOMPILING = "!!! ERROR:";
static private boolean isSource = false;
static private Path tempDir = null;
/***************************************************************************************************/
private String packageName; // with dots?
private String className; // (omitting package)
private String packageAndClassNameSlash;
// next 3 set at start of migrate for item being migrated
private CommonConverted current_cc;
private Path current_path;
private Container current_container;
/** includes trailing / */
private String outputDirectory;
/** includes trailing / */
private String outDirConverted;
/** includes trailing / */
private String outDirSkipped;
/** includes trailing / */
private String outDirLog;
private Container[] sourcesRoots = null; // only one of these has 1 or more Container instances
private Container[] classesRoots = null;
private CompilationUnit cu;
// save this value in the class instance to avoid recomputing it
private ClassLoader cachedMigrateClassLoader = null;
private String migrateClasspath = null;
// private String individualClasses = null; // to decompile
/**
* CommonConverted next id, by fqcn key: fqcn_slashes value: next id
*/
private Map<String, Integer> nextCcId = new HashMap<>();
// @formatter:off
/**
* Common info about a particular source-code instance of a class
* Used to avoid duplicate work for the same JCas definition
* Used to track identical and non-identical duplicate defs
*
* When processing from sourcesRoots:
* use map: origSourceToCommonConverted key = source string
* if found, skip conversion, use previous converted result.
*
* When processing from classesRoots:
* use map: origBytesToCommonConverted key = byte[], kept by container in container
* if found, use previous converted results
*/
// @formatter:on
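// A minimal dedup sketch (added commentary, not original code; the migrate step shown is schematic):
//   CommonConverted cc = sourceToCommonConverted.get(origSource);
//   if (cc == null) {
//     cc = new CommonConverted(origSource, origBytes, path, container, fqcn_slash);
//     sourceToCommonConverted.put(origSource, cc);
//     // ... migrate and fill in cc.v3Source / cc.v3SourcePath ...
//   } else {
//     cc.containersAndV2Paths.add(new ContainerAndPath(path, container)); // just record the extra location
//   }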
private class CommonConverted {
/**
 * Starts at 0 and is incremented for each new instance for a particular fqcn_slash; cannot be
 * assigned until the fqcn is known.
*/
int id = -1; // temp value
final String v2Source; // remembered original source
final byte[] v2ByteCode; // remembered original bytes
/**
 * all paths + their containers having the same converted result. The container is needed because it
 * might change the classpath for compiling; the path is to the v2 source or compiled class.
*/
final Set<ContainerAndPath> containersAndV2Paths = new HashSet<>();
String v3Source; // if converted, the result
/** converted/v3/id-of-cc/pkg/name/classname.java */
Path v3SourcePath; // path to converted source or null
String fqcn_slash; // full name of the class e.g. java/util/Foo. unknown for sources at first
CommonConverted(String origSource, byte[] v2ByteCode, Path path, Container container,
String fqcn_slash) {
this.v2Source = origSource;
this.v2ByteCode = v2ByteCode;
containersAndV2Paths.add(new ContainerAndPath(path, container));
this.fqcn_slash = fqcn_slash;
}
/**
*
* @param container
* having this commonConverted instance
* @return the path to .java or .class file. If the container is a Jar or PEAR, it is the path
* within that Jar or Pear FileSystem
*/
Path getV2SourcePath(Container container) {
for (ContainerAndPath cp : containersAndV2Paths) {
if (cp.container == container) {
return cp.path;
}
}
throw new RuntimeException("internalError");
}
int getId() {
if (id < 0) {
Integer nextId = nextCcId.computeIfAbsent(fqcn_slash, s -> INTEGER0);
nextCcId.put(fqcn_slash, nextId + 1);
this.id = nextId;
}
return id;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return v2Source == null ? 0 : v2Source.hashCode();
}
/*
* equal if the v2source is equal
*/
@Override
public boolean equals(Object obj) {
return obj instanceof CommonConverted && v2Source != null
&& v2Source.equals(((CommonConverted) obj).v2Source);
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
int maxLen = 10;
si(sb).append("CommonConverted [v2Source=").append(Misc.elide(v2Source, 100));
indent[0] += 2;
try {
si(sb).append("v2ByteCode=");
sb.append(v2ByteCode != null
? Arrays.toString(Arrays.copyOf(v2ByteCode, Math.min(v2ByteCode.length, maxLen)))
: "null").append(',');
si(sb).append("containersAndPaths=")
.append(containersAndV2Paths != null ? Misc.ppList(indent,
Misc.setAsList(containersAndV2Paths), -1, StringBuilder::append) : "null")
.append(',');
si(sb).append("v3SourcePath=").append(v3SourcePath).append(',');
si(sb).append("fqcn_slash=").append(fqcn_slash).append("]").append('\n');
} finally {
indent[0] -= 2;
}
return sb.toString();
}
}
// @formatter:off
/** Cache of already converted source classes, to avoid redoing them;
* - key is the actual source
* - value is CommonConverted
* This cache is over all containers
*/
// @formatter:on
private Map<String, CommonConverted> sourceToCommonConverted = new HashMap<>();
/**
* A map from fqcn_slash to a list of converted sources one per non-duplicated source
*/
private Map<String, List<CommonConverted>> classname2multiSources = new TreeMap<>();
/************************************
* Reporting
************************************/
// private final List<Path> v2JCasFiles = new ArrayList<>(); // unused
// private final List<Path> v3JCasFiles = new ArrayList<>(); // unused
private final List<PathContainerAndReason> nonJCasFiles = new ArrayList<>(); // path, reason
private final List<PathContainerAndReason> failedMigration = new ArrayList<>(); // path, reason
private final List<PathContainerAndReason> skippedBuiltins = new ArrayList<>(); // path,
// "built-in"
private final List<PathContainerAndReason> deletedCheckModified = new ArrayList<>(); // path,
// deleted
// check
// string
private final List<String1AndString2> pathWorkaround = new ArrayList<>(); // original, workaround
private final List<String1AndString2> pearClassReplace = new ArrayList<>(); // pear, classname
private final List<String1AndString2> jarClassReplace = new ArrayList<>(); // jar, classname
private final List<PathContainerAndReason> manualInspection = new ArrayList<>(); // path, reason
// private final List<PathAndPath> embeddedJars = new ArrayList<>(); // source, temp
private boolean isV2JCas; // false at start of migrate, set to true if a v2 class candidate is
// discovered
private boolean isConvert2v3; // true at start of migrate, set to false if conversion fails, left
// true if already a v3
private boolean isBuiltinJCas; // false at start of migrate, set to true if a built-in class is
// discovered
/************************************
* Context for visits
************************************/
/**
* if non-null, we're inside the ast for a likely JCas getter or setter method
*/
private MethodDeclaration get_set_method;
private String featName;
private boolean isGetter;
private boolean isArraySetter;
/**
* the range name part for _getXXXValue.. calls
*/
private Object rangeNamePart;
/**
* the range name part for _getXXXValue.. calls without converting Ref to Feature
*/
private String rangeNameV2Part;
/**
* temp place to insert static final int feature declarations
*/
private NodeList<BodyDeclaration<?>> fi_fields = new NodeList<>();
private Set<String> featNames = new HashSet<>();
private boolean hasV2Constructors;
private boolean hasV3Constructors;
private boolean error_decompiling = false;
private boolean badClassName;
private int itemCount;
/**
 * set if getAndProcessCandidatesInContainer encounters a class where it cannot do the compile
*/
private boolean unableToCompile;
final private StringBuilder psb = new StringBuilder();
public MigrateJCas() {
}
public static void main(String[] args) {
(new MigrateJCas()).run(args);
}
/***********************************
* Main
*
* @param args
* -
***********************************/
void run(String[] args) {
CommandLineParser clp = parseCommandArgs(args);
System.out.format("Output top directory: %s%n", outputDirectory);
// clear output dir
FileUtils.deleteRecursive(new File(outputDirectory));
isSource = sourcesRoots != null;
boolean isOk;
if (isSource) {
isOk = processRootsCollection("source", sourcesRoots, clp);
} else {
if (javaCompiler == null) {
System.out.println("The migration tool cannot compile the migrated files, \n"
+ " because no Java compiler is available.\n"
+ " To make one available, run this tool using a Java JDK, not JRE");
}
isOk = processRootsCollection("classes", classesRoots, clp);
}
// if (individualClasses != null) {
// processCollection("individual classes: ", new Iterator<String>() {
// Iterator<String> it = Arrays.asList(individualClasses.split(File.pathSeparator)).iterator();
// public boolean hasNext() {return it.hasNext();}
// public String next() {
// return prepareIndividual(it.next());}
// });
// }
if (error_decompiling) {
isOk = false;
}
isOk = report() && isOk;
System.out.println("Migration finished " + (isOk ? "with no unusual conditions."
: "with 1 or more unusual conditions that need manual checking."));
}
/**
 * Called for compiled input when a compiler is available and there is no name collision, and the
 * container is a PEAR or a Jar. Updates a copy of the PEAR or Jar with the converted classes.
 *
 * @param container the Jar or PEAR container whose copy gets the replacement .class files
*/
private void postProcessPearOrJar(Container container) {
Path outDir = Paths.get(outputDirectory, container.isJar ? "jars" : "pears",
Integer.toString(container.id));
withIOX(() -> Files.createDirectories(outDir));
si(psb).append("Replacing .class files in copy of ").append(container.rootOrig);
flush(psb);
try {
// copy the pear or jar so we don't change the original
Path lastPartOfPath = container.rootOrig.getFileName();
if (null == lastPartOfPath)
throw new RuntimeException("Internal Error");
Path pearOrJarCopy = Paths.get(outputDirectory, container.isJar ? "jars" : "pears",
Integer.toString(container.id), lastPartOfPath.toString());
Files.copy(container.rootOrig, pearOrJarCopy);
// put up a file system on the pear or jar
FileSystem pfs = FileSystems.newFileSystem(pearOrJarCopy, (ClassLoader) null);
// replace the .class files in this PEAR or Jar with corresponding v3 ones
indent[0] += 2;
String[] previousSkip = { "" };
container.v3CompiledPathAndContainerItemPath.forEach(c_p -> {
if (Files.exists(c_p.v3CompiledPath)) {
withIOX(() -> Files.copy(c_p.v3CompiledPath, pfs.getPath(c_p.pathInContainer),
StandardCopyOption.REPLACE_EXISTING));
reportPearOrJarClassReplace(pearOrJarCopy.toString(), c_p.v3CompiledPath.toString(),
container);
} else {
String pstr = c_p.v3CompiledPath.toString();
String pstr2 = pstr;
if (previousSkip[0] != "") {
int cmn = findFirstCharDifferent(previousSkip[0], pstr);
pstr2 = cmn > 5 ? ("..." + pstr.substring(cmn)) : pstr;
}
previousSkip[0] = pstr;
si(psb).append("Skipping replacing ").append(pstr2)
.append(" because it could not be found, perhaps due to compile errors.");
flush(psb);
}
});
indent[0] -= 2;
// for (CommonConverted cc : container.convertedItems) {
// Map<Container, Path> v3ccs = cc.v3CompiledResultPaths;
// v3ccs.forEach((v3ccc, v3cc_path) ->
// {
// if (v3ccc == container) {
// String path_in_v3_classes = cc.v3CompiledResultPaths.get(container).toString();
//
// withIOX(() -> Files.copy(v3cc_path, pfs.getPath(path_in_v3_classes)));
// reportPearOrJarClassReplace(pearOrJarCopy.toString(), path_in_v3_classes, container);
// }
// });
// }
pfs.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// @formatter:off
/**
* Compile all the migrated JCas classes in this container, adjusting the classpath if the
* container is a Jar or Pear to include the Jar or PEAR.
*
* As a side effect, it saves in the container, a list of all the compiled things together with
* the path in container part, for use by a subsequent step to update copies of the jars/pears.
*
* The items in the container are broken into batches of multiple classes to be compiled together.
* - Batches are grouped by alternative number. This ensures that multiple
* definitions of the same class are compiled separately (otherwise the compiler complains
* about multiple definitions).
*
* As a side effect, compiling updates the container, adding all the compiled items
* to v3CompiledPathAndContainerItemPath.
*
* @param container -
* @return true if compiled 1 or more sources, false if nothing was compiled
*/
// @formatter:on
private boolean compileV3SourcesCommon2(Container container) {
String classesBaseDir = outDirConverted + "v3-classes/" + container.id;
// specify the classpath. For PEARs use a class loader that loads first.
String classpath = getCompileClassPath(container);
// // debug
// String[] cpa = classpath.split(File.pathSeparator);
// System.out.println("debug - compilation classpath");
// int j = 0;
// for (String s : cpa) System.out.println("debug classpath: " + (++j) + " " + s);
// get a list of compilation unit path strings to the converted/v3/nnn/path
/**
* containerRoot is rootOrig or for Jars/Pears the Path to "/" in the zip file system
*/
Path containerRoot = null;
// @formatter:off
/**
* The Cu Path Strings for one container might have multiple instances of the class.
* These might be for identical or different sources.
* - This happens when a root has multiple paths to instances of the same class.
* - Multiple compiled-paths might be for the same classname
*
* For non-identical sources, the commonContainer instance "id" is spliced into the
* v3 migrated source path: see getBaseOutputPath, e.g. converted/2/a3/fqcn/slashed/name.java
*
* The compiler will complain if you feed it the same compilation unit classname twice, with
* different paths saying "duplicate class definition".
* - Fix: do compilation in batches, one for each different commonConverted id.
*/
// @formatter:on
Map<Integer, ArrayList<String>> cu_path_strings_by_ccId = new TreeMap<>(); // tree map to have
// nice order of keys
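// Illustrative example (hypothetical class name): if this container holds two different
// versions of com.example.Foo, migrated as CommonConverted ids 0 and 1, the map ends up as
// {0 -> [<v3 source path for id 0>], 1 -> [<v3 source path for id 1>]}, and each entry is
// compiled as its own batch below so the compiler never sees duplicate class definitions.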
indent[0] += 2;
boolean isEmpty = true;
for (CommonConverted cc : container.convertedItems) {
if (cc.v3SourcePath == null)
continue; // skip items that failed migration
isEmpty = false;
// relativePathInContainer = the whole path with the first part (up to the end of the
// container root) stripped off
/**
* itemPath is the original path in the container to where the source or class file is For
* Jars and PEARs, it is relative to the Jar or PEAR
*/
Path itemPath = cc.getV2SourcePath(container);
if (null == containerRoot) {
// lazy setup on first call
// for Pears, must use the == filesystem, otherwise get
// ProviderMismatchException
containerRoot = (container.isJar || container.isPear)
? itemPath.getFileSystem().getPath("/")
: container.rootOrig;
}
/**
* relativePathInContainer might be x/y/z/a/b/c/name.class (ends in .class because we only get
* here when the input is class files)
*/
String relativePathInContainer = containerRoot.relativize(itemPath).toString();
container.v3CompiledPathAndContainerItemPath.add(new V3CompiledPathAndContainerItemPath(
Paths.get(classesBaseDir, "a" + cc.id,
cc.fqcn_slash + ".class" /* relativePathInContainer */),
relativePathInContainer));
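// Pairs the expected v3 compiled output (classesBaseDir/a<ccId>/<fqcn>.class) with the
// container-relative path of the original entry, so postProcessPearOrJar can later replace
// that entry in the copied Jar/PEAR.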
ArrayList<String> items = cu_path_strings_by_ccId.computeIfAbsent(cc.id,
x -> new ArrayList<>());
items.add(cc.v3SourcePath.toString());
}
if (isEmpty) {
si(psb).append("Skipping compiling for container ").append(container.id).append(" ")
.append(container.rootOrig);
si(psb).append(" because non of the v2 classes were migrated (might have been built-ins)");
flush(psb);
return false;
} else {
si(psb).append("Compiling for container ").append(container.id).append(" ")
.append(container.rootOrig);
flush(psb);
}
// List<String> cu_path_strings = container.convertedItems.stream()
// .filter(cc -> cc.v3SourcePath != null)
// .peek(cc -> container.v3CompiledPathAndContainerItemPath.add(
// new V3CompiledPathAndContainerItemPath(
// Paths.get(classesBaseDir, cc.v3SourcePath.toString()),
// getPathInContainer(container, cc).toString())))
// .map(cc -> cc.v3SourcePath.toString())
// .collect(Collectors.toList());
boolean resultOk = true;
for (int ccId = 0;; ccId++) { // do each version of classes separately
List<String> cups = cu_path_strings_by_ccId.get(ccId);
if (cups == null) {
break;
}
StandardJavaFileManager fileManager = javaCompiler.getStandardFileManager(null, null,
Charset.forName("UTF-8"));
Iterable<? extends JavaFileObject> compilationUnits = fileManager
.getJavaFileObjectsFromStrings(cups);
// //debug
// System.out.println("Debug: list of compilation unit strings for iteration " + i);
// int[] k = new int[] {0};
// cups.forEach(s -> System.out.println(Integer.toString(++(k[0])) + " " + s));
// System.out.println("debug end");
String classesBaseDirN = classesBaseDir + "/a" + ccId;
withIOX(() -> Files.createDirectories(Paths.get(classesBaseDirN)));
Iterable<String> options = Arrays.asList("-d", classesBaseDirN, "-classpath", classpath);
si(psb).append("Compiling for commonConverted version ").append(ccId).append(", ")
.append(cups.size()).append(" classes");
flush(psb);
DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
/*********** Compile ***********/
resultOk = javaCompiler
.getTask(null, fileManager, diagnostics, options, null, compilationUnits).call()
&& resultOk;
/********************************/
indent[0] += 2;
for (Diagnostic<? extends JavaFileObject> diagnostic : diagnostics.getDiagnostics()) {
JavaFileObject s = diagnostic.getSource();
si(psb).append(diagnostic.getKind());
int lineno = (int) diagnostic.getLineNumber();
if (lineno != Diagnostic.NOPOS) {
psb.append(" on line ").append(diagnostic.getLineNumber());
}
int pos = (int) diagnostic.getPosition();
if (pos != Diagnostic.NOPOS) {
psb.append(", position: ").append(diagnostic.getColumnNumber());
}
if (s != null) {
psb.append(" in ").append(s.toUri());
}
si(psb).append(" ").append(diagnostic.getMessage(null));
flush(psb);
}
withIOX(() -> fileManager.close());
indent[0] -= 2;
si(psb).append("Compilation finished")
.append(resultOk ? " with no errors." : " with some errors.");
flush(psb);
}
indent[0] -= 2;
unableToCompile = !resultOk;
return true;
}
// @formatter:off
/**
* The classpath used to compile is (in precedence order)
* - the classpath for this migration app (first in order to pick up v3 support, overriding others)
* - any Pears, going up the parent chain, closest ones first
* - any Jars, going up the parent chain, closest ones last
* - passed in migrate classpath
* @return the classpath to use in compiling the jcasgen'd sources
*/
// @formatter:on
private String getCompileClassPath(Container container) {
// start with this (the v3migration tool) app's classpath to a cp string
URLClassLoader systemClassLoader = (URLClassLoader) ClassLoader.getSystemClassLoader();
URL[] urls = systemClassLoader.getURLs();
StringBuilder cp = new StringBuilder();
boolean firstTime = true;
for (URL url : urls) {
if (!firstTime) {
cp.append(File.pathSeparatorChar);
} else {
firstTime = false;
}
cp.append(url.getPath());
}
// pears up the classpath, closest first
Container c = container;
while (c != null) {
if (c.isPear) {
cp.append(File.pathSeparator).append(c.pearClasspath);
}
c = c.parent;
}
// add the migrateClasspath, expanded
if (null != migrateClasspath) {
cp.append(File.pathSeparator).append(Misc.expandClasspath(migrateClasspath));
}
// add the Jars, closest last
c = container;
List<String> ss = new ArrayList<>();
while (c != null) {
if (c.isJar) {
ss.add(c.root.toString());
}
c = c.parent;
}
Collections.reverse(ss);
ss.forEach(s -> cp.append(File.pathSeparator).append(s));
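// Resulting order as built here: this tool's classpath, then PEAR classpaths (closest
// first), then the expanded migrateClasspath, then enclosing Jar paths (closest last).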
// System.out.println("debug: compile classpath = " + cp.toString());
return cp.toString();
}
/**
* Iterate to process collections from all roots. Called once, to process either sources or classes.
*
* @return false if unable to compile, true otherwise
*/
private boolean processRootsCollection(String kind, Container[] roots, CommandLineParser clp) {
unableToCompile = false; // preinit
psb.setLength(0);
indent[0] = 0;
itemCount = 1;
for (Container rootContainer : roots) {
showWorkStart(rootContainer);
// adds candidates to root containers, and adds sub containers for Jars and Pears
getAndProcessCandidatesInContainer(rootContainer);
// for (Path path : rootContainer.candidates) {
//
// CommonConverted cc = getSource(path, rootContainer);
// migrate(cc, rootContainer, path);
//
// if ((i % 50) == 0) System.out.format("%4d%n ", Integer.valueOf(i));
// i++;
// }
}
si(psb).append("Total number of candidates processed: ").append(itemCount - 1);
flush(psb);
indent[0] = 0;
return !unableToCompile;
}
private void showWorkStart(Container rootContainer) {
si(psb).append("Migrating " + rootContainer.rootOrig.toString());
indent[0] += 2;
si(psb).append("Each character is one class");
si(psb).append(" . means normal class");
si(psb).append(" b means built in");
si(psb).append(" i means identical duplicate");
si(psb).append(" d means non-identical definition for the same JCas class");
si(psb).append(" nnn at the end of the line is the number of classes migrated\n");
flush(psb);
}
/**
* parse command line args
*
* @param args
* -
* @return the CommandLineParser instance
*/
private CommandLineParser parseCommandArgs(String[] args) {
CommandLineParser clp = createCmdLineParser();
try {
clp.parseCmdLine(args);
} catch (Exception e) {
throw new RuntimeException(e);
}
if (!checkCmdLineSyntax(clp)) {
printUsage();
System.exit(2);
}
if (clp.isInArgsList(CLASS_FILE_ROOTS)) {
classesRoots = getRoots(clp, CLASS_FILE_ROOTS);
}
if (clp.isInArgsList(SOURCE_FILE_ROOTS)) {
sourcesRoots = getRoots(clp, SOURCE_FILE_ROOTS);
}
return clp;
}
private Container[] getRoots(CommandLineParser clp, String kind) {
String[] paths = clp.getParamArgument(kind).split("\\" + File.pathSeparator);
Container[] cs = new Container[paths.length];
int i = 0;
for (String path : paths) {
cs[i++] = new Container(null, Paths.get(path));
}
return cs;
}
/**
* @param p
* the path to the compiled or non-compiled source
* @param container
* the container
* @return the CommonConverted instance; as a side effect, the container's convertedItems
*         list is updated, if needed, to include it
*/
private CommonConverted getSource(Path p, Container container) {
try {
byte[] localV2ByteCode = null;
CommonConverted cc;
String v2Source;
if (!isSource) {
localV2ByteCode = Files.readAllBytes(p);
// only use prev decompiled if same container
cc = container.origBytesToCommonConverted.get(localV2ByteCode);
if (null != cc) {
return cc;
}
// decompile side effect: sets fqcn
try {
v2Source = decompile(localV2ByteCode, container.pearClasspath);
} catch (RuntimeException e) {
badClassName = true;
e.printStackTrace();
v2Source = null;
}
if (badClassName) {
System.err.println("Candidate with bad Class Name is: " + p.toString());
return null;
}
final byte[] finalbc = localV2ByteCode;
cc = sourceToCommonConverted.computeIfAbsent(v2Source,
src -> new CommonConverted(src, finalbc, p, container, packageAndClassNameSlash));
// cc = new CommonConverted(v2Source, localV2ByteCode, p, container,
// packageAndClassNameSlash);
container.origBytesToCommonConverted.put(localV2ByteCode, cc);
} else {
v2Source = FileUtils.reader2String(Files.newBufferedReader(p));
cc = sourceToCommonConverted.get(v2Source);
if (null == cc) {
cc = new CommonConverted(v2Source, null, p, container, "unknown");
sourceToCommonConverted.put(v2Source, cc);
} else {
// add this new path + container to set of pathsAndContainers kept by this CommonConverted
// object
cc.containersAndV2Paths.add(new ContainerAndPath(p, container));
}
}
// Containers have list of CommonConverted, which, in turn
// have Set of ContainerAndPath elements.
// (the same JCas class might appear in two different paths in a container)
if (!container.convertedItems.contains(cc)) {
container.convertedItems.add(cc);
}
return cc;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// @formatter:off
/**
* Migrate one JCas definition; writes 1 char to System.out to indicate progress.
*
* The source is either direct, or a decompiled version of a .class file (missing comments, etc.).
*
* This method only called if heuristics indicate this is a V2 JCas class definition.
*
* Skips the migration if already done.
* Skips if decompiling, and it failed.
*
* The goal is to preserve as much as possible of existing customization.
* The general approach is to parse the source into an AST, and use visitor methods.
* For getter/setter methods that are for features (heuristic), set up a context for inner visitors
* identifying the getter / setter.
* - reuse method declarator, return value casts, value expressions
* - remove feature checking statement, array bounds checking statement, if present.
* - replace the simpleCore (see Jg), replace the arrayCore
*
* For constructors, replace the 2-arg one that has arguments:
* addr and TOP_Type with the v3 one using TypeImpl, CasImpl.
*
* Add needed imports.
* Add for each feature the _FI_xxx static field declarator.
*
* Leave other top level things alone
* - additional constructors.
* - other methods not using jcasType refs
*
* @param source - the source, either directly from a .java file, or a decompiled .class file
*/
// @formatter:on
private void migrate(CommonConverted cc, Container container, Path path) {
if (null == cc) {
System.err.println("Skipping this component due to decompile failure: " + path.toString());
System.err.println(" in container: " + container);
isConvert2v3 = false;
error_decompiling = true;
return;
}
if (cc.v3Source != null) {
// next updates classname2multiSources for tracking non-identical defs
boolean identical = collectInfoForReports(cc);
assert identical;
psb.append("i");
flush(psb);
cc.containersAndV2Paths.add(new ContainerAndPath(path, container));
return;
}
assert cc.v2Source != null;
packageName = null;
className = null;
packageAndClassNameSlash = null;
cu = null;
String source = cc.v2Source;
isConvert2v3 = true; // preinit, set false if convert fails
isV2JCas = false; // preinit, set true by reportV2Class, called by visit to
// ClassOrInterfaceDeclaration,
// when it has v2 constructors, and the right type and type_index_id field
// declares
isBuiltinJCas = false;
featNames.clear();
fi_fields.clear();
try { // to reset the next 3 items
current_cc = cc;
current_container = container;
current_path = path;
// System.out.println("Migrating source before migration:\n");
// System.out.println(source);
// System.out.println("\n\n\n");
if (source.startsWith(ERROR_DECOMPILING)) {
System.err.println("Decompiling failed for class: " + cc.toString() + "\n got: "
+ Misc.elide(source, 300, false));
System.err.println("Please check the migrateClasspath");
if (null == migrateClasspath) {
System.err.println("classpath of this app is");
System.err.println(System.getProperty("java.class.path"));
} else {
System.err.println(" first part of migrateClasspath argument was: "
+ Misc.elide(migrateClasspath, 300, false));
System.err.println(" Value used was:");
URL[] urls = Misc.classpath2urls(migrateClasspath);
for (URL url : urls) {
System.err.println(" " + url.toString());
}
}
System.err.println("Skipping this component");
isConvert2v3 = false;
error_decompiling = true;
return;
}
StringReader sr = new StringReader(source);
try {
cu = JavaParser.parse(sr);
addImport("java.lang.invoke.CallSite");
addImport("java.lang.invoke.MethodHandle");
addImport("org.apache.uima.cas.impl.CASImpl");
addImport("org.apache.uima.cas.impl.TypeImpl");
addImport("org.apache.uima.cas.impl.TypeSystemImpl");
this.visit(cu, null); // side effect: sets the className, packageAndClassNameSlash,
// packageName
new removeEmptyStmts().visit(cu, null);
if (isConvert2v3) {
removeImport("org.apache.uima.jcas.cas.TOP_Type");
}
if (isConvert2v3 && fi_fields.size() > 0) {
NodeList<BodyDeclaration<?>> classMembers = cu.getTypes().get(0).getMembers();
int positionOfFirstConstructor = findConstructor(classMembers);
if (positionOfFirstConstructor < 0) {
throw new RuntimeException();
}
classMembers.addAll(positionOfFirstConstructor, fi_fields);
}
ImportDeclaration firstImport = cu.getImports().get(0);
String transformedMessage = String.format(
" Migrated by uimaj-v3-migration-jcas, %s%n" + " Container: %s%n"
+ " Path in container: %s%n",
new Date(), container.toString1(), path.toString()).replace('\\', '/');
Optional<Comment> existingComment = firstImport.getComment();
if (existingComment.isPresent()) {
Comment comment = existingComment.get();
comment.setContent(comment.getContent() + "\n" + transformedMessage);
} else {
firstImport.setBlockComment(transformedMessage);
}
if (isSource) {
sourceToCommonConverted.put(source, cc);
}
boolean identicalFound = collectInfoForReports(cc);
assert !identicalFound;
if (isV2JCas) {
writeV2Orig(cc, isConvert2v3);
}
if (isConvert2v3) {
cc.v3Source = new PrettyPrinter(printCu).print(cu);
writeV3(cc);
}
psb.append(isBuiltinJCas ? "b"
: (classname2multiSources.get(cc.fqcn_slash).size() == 1) ? "." : "d"); // means
// non-identical
// duplicate
flush(psb);
} catch (IOException e) {
e.printStackTrace();
throw new UIMARuntimeException(e);
} catch (Exception e) {
System.out.println("debug: exception caught, source was\n" + source);
throw new UIMARuntimeException(e);
}
} finally {
current_cc = null;
current_container = null;
current_path = null;
}
}
/**
* Called when have already converted this exact source or when we just finished converting this
* source. Add this instance to the tracking information for multiple versions (identical or not)
* of a class
*
* @return true if this is an identical duplicate of one already done
*/
private boolean collectInfoForReports(CommonConverted cc) {
String fqcn_slash = cc.fqcn_slash;
// track, by fqcn, all duplicates (identical or not)
// // for a given fully qualified class name (slashified),
// // find the list of CommonConverteds - one per each different version
// // create it if null
List<CommonConverted> commonConverteds = classname2multiSources.computeIfAbsent(fqcn_slash,
k -> new ArrayList<>());
// search to see if this instance already in the set
// if so, add the path to the set of identicals
// For class sources case, we compare the decompiled version
boolean found = commonConverteds.contains(cc);
if (!found) {
commonConverteds.add(cc);
}
return found;
}
/******************
* Visitors
******************/
/**
* Capture the type name from all top-level types AnnotationDeclaration, Empty, and Enum
*/
@Override
public void visit(AnnotationDeclaration n, Object ignore) {
updateClassName(n);
super.visit(n, ignore);
}
// @Override
// public void visit(EmptyTypeDeclaration n, Object ignore) {
// updateClassName(n);
// super.visit(n, ignore);
// }
@Override
public void visit(EnumDeclaration n, Object ignore) {
updateClassName(n);
super.visit(n, ignore);
}
// @formatter:off
/**
* Check if the top level class looks like a JCas class, and report if not:
* has 0, 1, and 2 element constructors
* has static final field defs for type and typeIndexID
*
* Also check if V2 style: 2 arg constructor arg types
* Report if looks like V3 style due to args of 2 arg constructor
*
* if class doesn't extend anything, not a JCas class.
* if class is enum, not a JCas class
* @param n -
* @param ignore -
*/
// @formatter:on
@Override
public void visit(ClassOrInterfaceDeclaration n, Object ignore) {
// do checks to see if this is a JCas class; if not report skipped
Optional<Node> maybeParent = n.getParentNode();
if (maybeParent.isPresent()) {
Node parent = maybeParent.get();
if (parent instanceof CompilationUnit) {
updateClassName(n);
if (isBuiltinJCas) {
// is a built-in class, skip it
super.visit(n, ignore);
return;
}
NodeList<ClassOrInterfaceType> supers = n.getExtendedTypes();
if (supers == null || supers.size() == 0) {
reportNotJCasClass("class doesn't extend a superclass");
super.visit(n, ignore);
return;
}
NodeList<BodyDeclaration<?>> members = n.getMembers();
setHasJCasConstructors(members);
if (hasV2Constructors && hasTypeFields(members)) {
reportV2Class();
super.visit(n, ignore);
return;
}
if (hasV2Constructors) {
reportNotJCasClassMissingTypeFields();
return;
}
if (hasV3Constructors) {
reportV3Class();
return;
}
reportNotJCasClass("missing v2 constructors");
return;
}
}
super.visit(n, ignore);
return;
}
@Override
public void visit(PackageDeclaration n, Object ignored) {
packageName = n.getNameAsString();
super.visit(n, ignored);
}
/***************
* Constructors - modify the 2 arg constructor - changing the args and the body
*
* @param n
* - the constructor node
* @param ignored
* -
*/
@Override
public void visit(ConstructorDeclaration n, Object ignored) {
super.visit(n, ignored); // processes the params
if (!isConvert2v3) { // for enums, annotations
return;
}
List<Parameter> ps = n.getParameters();
if (ps.size() == 2 && getParmTypeName(ps, 0).equals("int")
&& getParmTypeName(ps, 1).equals("TOP_Type")) {
/**
* public Foo(TypeImpl type, CASImpl casImpl) { super(type, casImpl); readObject();
*/
setParameter(ps, 0, "TypeImpl", "type");
setParameter(ps, 1, "CASImpl", "casImpl");
// Body: change the 1st statement (must be super)
NodeList<Statement> stmts = n.getBody().getStatements();
if (!(stmts.get(0) instanceof ExplicitConstructorInvocationStmt)) {
recordBadConstructor("missing super call");
return;
}
NodeList<Expression> args = ((ExplicitConstructorInvocationStmt) (stmts.get(0)))
.getArguments();
args.set(0, new NameExpr("type"));
args.set(1, new NameExpr("casImpl"));
// leave the rest unchanged.
}
}
private final static Pattern refGetter = Pattern
.compile("(ll_getRef(Array)?Value)|" + "(ll_getFSForRef)");
private final static Pattern word1 = Pattern.compile("\\A(\\w*)"); // word chars starting at
// beginning \\A means
// beginning
// @formatter:off
/*****************************
* Method Declaration Visitor
* Heuristic to determine if a feature getter or setter:
* - name: is 4 or more chars, starting with get or set, with 4th char uppercase
* is not "getTypeIndexID"
* - (optional - if comments are available:)
* getter for xxx, setter for xxx
* - for getter: has 0 or 1 arg (1 arg case for indexed getter, arg must be int type)
* - for setter: has 1 or 2 args
*
* Workaround for decompiler - getters which return FSs might be missing the cast to the return value type
*
*****************************/
// @formatter:on
@Override
public void visit(MethodDeclaration n, Object ignore) {
String name = n.getNameAsString();
isGetter = isArraySetter = false;
do { // to provide break exit
if (name.length() >= 4 && ((isGetter = name.startsWith("get")) || name.startsWith("set"))
&& Character.isUpperCase(name.charAt(3)) && !name.equals("getTypeIndexID")) {
List<Parameter> ps = n.getParameters();
if (isGetter) {
if (ps.size() > 1)
break;
} else { // is setter
if (ps.size() > 2 || ps.size() == 0)
break;
if (ps.size() == 2) {
if (!getParmTypeName(ps, 0).equals("int"))
break;
isArraySetter = true;
}
}
// get the range-part-name and convert to v3 range ("Ref" changes to "Feature")
String bodyString = n.getBody().get().toString(printWithoutComments);
int i = bodyString.indexOf("jcasType.ll_cas.ll_");
if (i < 0)
break;
String s = bodyString.substring(i + "jcasType.ll_cas.ll_get".length()); // also for
// ...ll_set - same
// length!
if (s.startsWith("FSForRef(")) { // then it's the wrapper and the wrong instance.
i = s.indexOf("jcasType.ll_cas.ll_");
if (i < 0) {
reportUnrecognizedV2Code(
"Found \"jcasType.ll_cas.ll_[set or get]...FSForRef(\" but didn't find following \"jcasType.ll_cas_ll_\"\n"
+ n.toString());
break;
}
s = s.substring(i + "jcasType.ll_cas.ll_get".length());
}
i = s.indexOf("Value");
if (i < 0) {
reportUnrecognizedV2Code(
"Found \"jcasType.ll_cas.ll_[set or get]\" but didn't find following \"Value\"\n"
+ n.toString());
break; // give up
}
s = Character.toUpperCase(s.charAt(0)) + s.substring(1, i);
rangeNameV2Part = s;
rangeNamePart = s.equals("Ref") ? "Feature" : s;
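// e.g. a v2 body using jcasType.ll_cas.ll_getRefValue(...) yields rangeNameV2Part "Ref"
// and v3 rangeNamePart "Feature"; other range names (Int, Float, ...) stay the same.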
// get feat name following ")jcasType).casFeatCode_xxxxx,
i = bodyString.indexOf("jcasType).casFeatCode_");
if (i == -1) {
reportUnrecognizedV2Code("Didn't find \"...jcasType).casFeatCode_\"\n" + n.toString());
break;
}
Matcher m = word1.matcher(bodyString.substring(i + "jcasType).casFeatCode_".length()));
if (!m.find()) {
reportUnrecognizedV2Code(
"Found \"...jcasType).casFeatCode_\" but didn't find subsequent word\n"
+ n.toString());
break;
}
featName = m.group(1);
String fromMethod = Character.toLowerCase(name.charAt(3)) + name.substring(4);
if (!featName.equals(fromMethod)) {
// don't report if the only difference is the first letter capitalization
if (!(Character.toLowerCase(featName.charAt(0)) + featName.substring(1))
.equals(fromMethod)) {
reportMismatchedFeatureName(String.format("%-25s %s", featName, name));
}
}
// add _FI_xxx = TypeSystemImpl.getAdjustedFeatureOffset("xxx");
// replaced Sept 2017
// NodeList<Expression> args = new NodeList<>();
// args.add(new StringLiteralExpr(featName));
// VariableDeclarator vd = new VariableDeclarator(
// intType,
// "_FI_" + featName,
// new MethodCallExpr(new NameExpr("TypeSystemImpl"), new
// SimpleName("getAdjustedFeatureOffset"), args));
// if (featNames.add(featName)) { // returns true if it was added, false if already in the
// set of featNames
// fi_fields.add(new FieldDeclaration(public_static_final, vd));
// }
// add _FC_xxx = TypeSystemImpl.createCallSite(ccc.class, "xxx");
// add _FH_xxx = _FC_xxx.dynamicInvoker();
// add _FeatName_xxx = "xxx" // https://issues.apache.org/jira/browse/UIMA-5575
if (featNames.add(featName)) { // returns true if it was added, false if already in the set
// of featNames
// _FC_xxx = TypeSystemImpl.createCallSite(ccc.class, "xxx");
MethodCallExpr initCallSite = new MethodCallExpr(new NameExpr("TypeSystemImpl"),
"createCallSite");
initCallSite.addArgument(new FieldAccessExpr(new NameExpr(className), "class"));
initCallSite.addArgument(new StringLiteralExpr(featName));
VariableDeclarator vd_FC = new VariableDeclarator(callSiteType, "_FC_" + featName,
initCallSite);
fi_fields.add(new FieldDeclaration(private_static_final, vd_FC));
// _FH_xxx = _FC_xxx.dynamicInvoker();
MethodCallExpr initInvoker = new MethodCallExpr(new NameExpr(vd_FC.getName()),
"dynamicInvoker");
VariableDeclarator vd_FH = new VariableDeclarator(methodHandleType, "_FH_" + featName,
initInvoker);
fi_fields.add(new FieldDeclaration(private_static_final, vd_FH));
// _FeatName_xxx = "xxx" // https://issues.apache.org/jira/browse/UIMA-5575
VariableDeclarator vd_fn = new VariableDeclarator(stringType, "_FeatName_" + featName,
new StringLiteralExpr(featName));
fi_fields.add(new FieldDeclaration(public_static_final, vd_fn));
}
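// Illustrative result for a feature named "begin" in a class Foo (hypothetical names):
//   private static final CallSite _FC_begin = TypeSystemImpl.createCallSite(Foo.class, "begin");
//   private static final MethodHandle _FH_begin = _FC_begin.dynamicInvoker();
//   public static final String _FeatName_begin = "begin";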
/**
* add a missing cast stmt for return stmts where the value being returned:
*   - doesn't have a cast already
*   - has the expression be a methodCallExpr with a name which looks like
*     ll_getRefValue or ll_getRefArrayValue
*/
if (isGetter && "Feature".equals(rangeNamePart)) {
for (Statement stmt : n.getBody().get().getStatements()) {
if (stmt instanceof ReturnStmt) {
Expression e = getUnenclosedExpr(((ReturnStmt) stmt).getExpression().get());
if ((e instanceof MethodCallExpr)) {
String methodName = ((MethodCallExpr) e).getNameAsString();
if (refGetter.matcher(methodName).matches()) { // ll_getRefValue or
// ll_getRefArrayValue
addCastExpr(stmt, n.getType());
}
}
}
}
}
get_set_method = n; // used as a flag during inner "visits" to signal
// we're inside a likely feature setter/getter
} // end of test for getter or setter method
} while (false); // do once, provide break exit
super.visit(n, ignore);
get_set_method = null; // after visiting, reset the get_set_method to null
}
/**
* Visitor for if stmts - removes feature missing test
*/
@Override
public void visit(IfStmt n, Object ignore) {
do {
// if (get_set_method == null) break; // sometimes, these occur outside of recogn.
// getters/setters
Expression c = n.getCondition(), e;
BinaryExpr be, be2;
List<Statement> stmts;
if ((c instanceof BinaryExpr) && ((be = (BinaryExpr) c).getLeft() instanceof FieldAccessExpr)
&& ((FieldAccessExpr) be.getLeft()).getNameAsString().equals("featOkTst")) {
// remove the feature missing if statement
// verify the remaining form
if (!(be.getRight() instanceof BinaryExpr)
|| !((be2 = (BinaryExpr) be.getRight()).getRight() instanceof NullLiteralExpr)
|| !(be2.getLeft() instanceof FieldAccessExpr)
|| !((e = getExpressionFromStmt(n.getThenStmt())) instanceof MethodCallExpr)
|| !(((MethodCallExpr) e).getNameAsString()).equals("throwFeatMissing")) {
reportDeletedCheckModified("The featOkTst was modified:\n" + n.toString() + '\n');
}
BlockStmt parent = (BlockStmt) n.getParentNode().get();
stmts = parent.getStatements();
stmts.set(stmts.indexOf(n), new EmptyStmt()); // don't remove
// otherwise iterators fail
// parent.getStmts().remove(n);
return;
}
} while (false);
super.visit(n, ignore);
}
/**
* visitor for method calls
*/
@Override
public void visit(MethodCallExpr n, Object ignore) {
Optional<Node> p1, p2, p3 = null;
Node updatedNode = null;
NodeList<Expression> args;
do {
if (get_set_method == null)
break;
/** remove checkArraybounds statement **/
if (n.getNameAsString().equals("checkArrayBounds")
&& ((p1 = n.getParentNode()).isPresent() && p1.get() instanceof ExpressionStmt)
&& ((p2 = p1.get().getParentNode()).isPresent() && p2.get() instanceof BlockStmt)
&& ((p3 = p2.get().getParentNode()).isPresent() && p3.get() == get_set_method)) {
NodeList<Statement> stmts = ((BlockStmt) p2.get()).getStatements();
stmts.set(stmts.indexOf(p1.get()), new EmptyStmt());
return;
}
// convert simpleCore expression ll_get/setRangeValue
boolean useGetter = isGetter || isArraySetter;
if (n.getNameAsString()
.startsWith("ll_" + (useGetter ? "get" : "set") + rangeNameV2Part + "Value")) {
args = n.getArguments();
if (args.size() != (useGetter ? 2 : 3))
break;
String suffix = useGetter ? "Nc" : rangeNamePart.equals("Feature") ? "NcWj" : "Nfc";
String methodName = "_" + (useGetter ? "get" : "set") + rangeNamePart + "Value" + suffix;
args.remove(0); // remove the old addr arg
// arg 0 converted when visiting args FieldAccessExpr
n.setScope(null);
n.setName(methodName);
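// e.g. jcasType.ll_cas.ll_getIntValue(addr, featCode) becomes _getIntValueNc(featCode);
// the scope is dropped and the leading addr argument removed (the remaining arg is
// converted in the FieldAccessExpr visitor).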
}
// convert array sets/gets
String z = "ll_" + (isGetter ? "get" : "set");
String nname = n.getNameAsString();
if (nname.startsWith(z) && nname.endsWith("ArrayValue")) {
String s = nname.substring(z.length());
s = s.substring(0, s.length() - "Value".length()); // s = "ShortArray", etc.
if (s.equals("RefArray"))
s = "FSArray";
if (s.equals("IntArray"))
s = "IntegerArray";
EnclosedExpr ee = new EnclosedExpr(
new CastExpr(new ClassOrInterfaceType(s), n.getArguments().get(0)));
n.setScope(ee); // the getter for the array fs
n.setName(isGetter ? "get" : "set");
n.getArguments().remove(0);
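// e.g. (illustrative variable names) jcasType.ll_cas.ll_getShortArrayValue(fsRef, i)
// becomes ((ShortArray) fsRef).get(i)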
}
/** remove ll_getFSForRef **/
/** remove ll_getFSRef **/
if (n.getNameAsString().equals("ll_getFSForRef")
|| n.getNameAsString().equals("ll_getFSRef")) {
updatedNode = replaceInParent(n, n.getArguments().get(0));
}
} while (false);
if (updatedNode != null) {
updatedNode.accept(this, null);
} else {
super.visit(n, null);
}
}
/**
* visitor for field access expressions - convert ((...type_Type)jcasType).casFeatCode_XXXX to
* _FI_xxx
*
* @param n
* -
* @param ignore
* -
*/
@Override
public void visit(FieldAccessExpr n, Object ignore) {
Expression e;
Optional<Expression> oe;
String nname = n.getNameAsString();
if (get_set_method != null) {
if (nname.startsWith("casFeatCode_") && ((oe = n.getScope()).isPresent())
&& ((e = getUnenclosedExpr(oe.get())) instanceof CastExpr)
&& ("jcasType".equals(getName(((CastExpr) e).getExpression())))) {
String featureName = nname.substring("casFeatCode_".length());
// replaceInParent(n, new NameExpr("_FI_" + featureName)); // repl last in List<Expression>
// (args)
MethodCallExpr getint = new MethodCallExpr(null, "wrapGetIntCatchException");
getint.addArgument(new NameExpr("_FH_" + featureName));
replaceInParent(n, getint);
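// e.g. (illustrative) ((Foo_Type)jcasType).casFeatCode_begin becomes wrapGetIntCatchException(_FH_begin)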
return;
} else if (nname.startsWith("casFeatCode_")) {
reportMigrateFailed(
"Found field casFeatCode_ ... without a previous cast expr using jcasType");
}
}
super.visit(n, ignore);
}
private class removeEmptyStmts extends VoidVisitorAdapter<Object> {
@Override
public void visit(BlockStmt n, Object ignore) {
n.getStatements().removeIf(statement -> statement instanceof EmptyStmt);
super.visit(n, ignore);
}
// @Override
// public void visit(MethodDeclaration n, Object ignore) {
// if (n.getNameAsString().equals("getModifiablePrimitiveNodes")) {
// System.out.println("debug");
// }
// super.visit(n, ignore);
// if (n.getNameAsString().equals("getModifiablePrimitiveNodes")) {
// System.out.println("debug");
// }
// }
}
// @formatter:off
/**
* converted files:
* java name, path (sorted by java name, v3 name only)
* not-converted:
* java name, path (sorted by java name)
* duplicates:
* java name, path (sorted by java name)
* @return true if it's likely everything converted OK.
*/
// @formatter:on
private boolean report() {
System.out.println("\n\nMigration Summary");
System.out.format("Output top directory: %s%n", outputDirectory);
System.out.format("Date/time: %tc%n", new Date());
pprintRoots("Sources", sourcesRoots);
pprintRoots("Classes", classesRoots);
boolean isOk2 = true;
try {
// these reports, if non-empty, imply something needs manual checking, so reset isOk2
isOk2 = reportPaths("Workaround Directories", "workaroundDir.txt", pathWorkaround) && isOk2;
isOk2 = reportPaths("Reports of converted files where a deleted check was customized",
"deletedCheckModified.txt", deletedCheckModified) && isOk2;
isOk2 = reportPaths("Reports of converted files needing manual inspection",
"manualInspection.txt", manualInspection) && isOk2;
isOk2 = reportPaths("Reports of files which failed migration", "failed.txt", failedMigration)
&& isOk2;
isOk2 = reportPaths("Reports of non-JCas files", "NonJCasFiles.txt", nonJCasFiles) && isOk2;
isOk2 = reportPaths(
"Builtin JCas classes - skipped - need manual checking to see if they are modified",
"skippedBuiltins.txt", skippedBuiltins) && isOk2;
// these reports, if non-empty, do not imply OK issues
reportPaths("Reports of updated Jars", "jarFileUpdates.txt", jarClassReplace);
reportPaths("Reports of updated PEARs", "pearFileUpdates.txt", pearClassReplace);
// computeDuplicates();
// reportPaths("Report of duplicates - not identical", "nonIdenticalDuplicates.txt",
// nonIdenticalDuplicates);
// reportPaths("Report of duplicates - identical", "identicalDuplicates.txt",
// identicalDuplicates);
// isOk2 = reportDuplicates() && isOk2; // false if non-identical duplicates
return isOk2;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private void pprintRoots(String kind, Container[] roots) {
if (roots != null && roots.length > 0) {
try {
try (BufferedWriter bw = Files.newBufferedWriter(makePath(outDirLog + "ItemsProcessed"),
StandardOpenOption.CREATE)) {
logPrintNl(kind + " Roots:", bw);
indent[0] += 2;
try {
for (Container container : roots) {
pprintContainer(container, bw);
}
logPrintNl("", bw);
} finally {
indent[0] -= 2;
}
}
} catch (IOException e) {
throw new UIMARuntimeException(e);
}
}
}
private void pprintContainer(Container container, BufferedWriter bw) throws IOException {
logPrintNl(container.toString(), bw);
if (container.subContainers.size() > 1) {
logPrintNl("", bw);
indent[0] += 2;
for (Container subc : container.subContainers) {
pprintContainer(subc, bw);
}
}
}
// private void computeDuplicates() {
// List<ClassnameAndPath> toCheck = new ArrayList<>(c2ps);
// toCheck.addAll(extendableBuiltins);
// sortReport2(toCheck);
// ClassnameAndPath prevP = new ClassnameAndPath(null, null);
// List<ClassnameAndPath> sameList = new ArrayList<>();
// boolean areAllEqual = true;
//
// for (ClassnameAndPath p : toCheck) {
// if (!p.getFirst().equals(prevP.getFirst())) {
//
// addToIdenticals(sameList, areAllEqual);
// sameList.clear();
// areAllEqual = true;
//
// prevP = p;
// continue;
// }
//
// // have 2nd or subsequent same class
// if (sameList.size() == 0) {
// sameList.add(prevP);
// }
// sameList.add(p);
// if (areAllEqual) {
// if (isFilesMiscompare(p.path, prevP.path)) {
// areAllEqual = false;
// }
// }
// }
//
// addToIdenticals(sameList, areAllEqual);
// }
// /**
// * Compare two java source or class files
// * @param p1
// * @param p2
// * @return
// */
// private boolean isFilesMiscompare(Path p1, Path p2) {
// String s1 = (p1);
// String s2 = (p2);
// return !s1.equals(s2);
// }
// private void addToIdenticals(List<ClassnameAndPath> sameList, boolean areAllEqual) {
// if (sameList.size() > 0) {
// if (areAllEqual) {
// identicalDuplicates.addAll(sameList);
// } else {
// nonIdenticalDuplicates.addAll(sameList);
// }
// }
// }
/**
*
* @param name the string form of the path to create
* @return a path made from name, with directories created
* @throws IOException
*/
private Path makePath(String name) throws IOException {
Path p = Paths.get(name);
Path parent = p.getParent(); // all the parts of the path up to the final segment
if (parent == null) {
return p;
}
try {
Files.createDirectories(parent);
} catch (FileAlreadyExistsException e) { // parent already exists but is not a directory!
// caused by running on Windows system which ignores "case"
// there's a file at /x/y/ named "z", but the path wants to be /x/y/Z/
// Workaround: change "z" to "z_c" c for capitalization issue
current_container.haveDifferentCapitalizedNamesCollidingOnWindows = true;
Path fn = parent.getFileName();
if (fn == null) {
throw new IllegalArgumentException();
}
String newDir = fn.toString() + "_c";
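// e.g. (illustrative) if a file ".../x/y/z" already exists, a wanted directory ".../x/y/Z"
// is redirected to ".../x/y/Z_c" and the workaround is reported.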
Path parent2 = parent.getParent();
Path p2 = parent2 == null ? Paths.get(newDir) : Paths.get(parent2.toString(), newDir);
try {
Files.createDirectories(p2);
} catch (FileAlreadyExistsException e2) { // parent already exists but is not a directory!
throw new RuntimeException(e2);
}
reportPathWorkaround(parent.toString(), p2.toString());
Path lastPartOfPath = p.getFileName();
if (null == lastPartOfPath)
throw new RuntimeException();
return Paths.get(p2.toString(), lastPartOfPath.toString());
}
return p;
}
private void logPrint(String msg, Writer bw) throws IOException {
System.out.print(msg);
bw.write(msg);
}
private void logPrintNl(String msg, Writer bw) throws IOException {
logPrint(msg, bw);
logPrint("\n", bw);
}
/**
* prints "There were no xxx" if there are no items. prints a title, followed by a
* ================== underneath it
*
* prints a sorted report of two fields.
*
* @param title
* title of report
* @param fileName
* file name to save the report in (the report is also printed to System.out)
* @param items
* the set of items to report on
* @return true if items were empty
* @throws IOException
* -
*/
private <T, U> boolean reportPaths(String title, String fileName,
List<? extends Report2<T, U>> items) throws IOException {
if (items.size() == 0) {
System.out.println("There were no " + title);
return true;
}
System.out.println("\n" + title);
for (int i = 0; i < title.length(); i++)
System.out.print('=');
System.out.println("");
try (BufferedWriter bw = Files.newBufferedWriter(makePath(outDirLog + fileName),
StandardOpenOption.CREATE)) {
List<Report2<T, U>> sorted = new ArrayList<>(items);
sortReport2(sorted);
int max = 0;
int nbrFirsts = 0;
Object prevFirst = null;
for (Report2<T, U> p : sorted) {
max = Math.max(max, p.getFirstLength());
Comparable<T> first = p.getFirst();
if (first != prevFirst) {
prevFirst = first;
nbrFirsts++;
}
}
/**
* Two styles. Style 1: when nbrFirsts <= 25% of the number of items: each first on its own line, seconds indented
* Style 2: firsts and seconds on same line.
*/
int i = 1;
boolean style1 = nbrFirsts <= sorted.size() / 4;
prevFirst = null;
for (Report2<T, U> p : sorted) {
if (style1) {
if (prevFirst != p.getFirst()) {
prevFirst = p.getFirst();
logPrintNl(String.format("\n For: %s", p.getFirst()), bw);
}
logPrintNl(String.format(" %5d %s", i, p.getSecond()), bw);
} else {
logPrintNl(String.format("%5d %-" + max + "s %s", i, p.getFirst(), p.getSecond()), bw);
}
i++;
}
System.out.println("");
} // end of try-with-resources
return false;
}
private boolean isZipFs(Object o) {
// Surprise! sometimes the o is not an instance of FileSystem but is the zipfs anyways
return o.getClass().getName().contains("zipfs"); // java 8 and 9
}
/**
* Sort the items on first, then second
*
* @param items
*/
private <T, U> void sortReport2(List<? extends Report2<T, U>> items) {
items.sort((o1, o2) -> {
int r = protectedCompare(o1.getFirst(), o2.getFirst());
if (r == 0) {
r = protectedCompare(o1.getSecond(), o2.getSecond());
}
return r;
});
}
/**
* protect against comparing zip fs with non-zip fs - these are not comparable to each other in
* IBM Java 8
*
* @return -
*/
private <T> int protectedCompare(Comparable<T> comparable, Comparable<T> comparable2) {
// debug
try {
if (isZipFs(comparable)) {
if (isZipFs(comparable2)) {
return comparable.compareTo((T) comparable2); // both zip
} else {
return 1;
}
} else {
if (isZipFs(comparable2)) {
return -1;
} else {
return comparable.compareTo((T) comparable2); // both not zip
}
}
} catch (ClassCastException e) {
// debug
System.out.format("Internal error: c1: %b c2: %b%n c1: %s%n c2: %s%n", isZipFs(comparable),
isZipFs(comparable2), comparable.getClass().getName(),
comparable2.getClass().getName());
throw e;
}
}
// @formatter:off
/**
* Called only for top level roots. Sub containers recurse via getCandidates_processFile2.
*
* Walk the directory tree rooted at root
* - descend subdirectories
* - descend Jar file
* -- descend nested Jar files (!)
* by extracting these to a temp dir, and keeping a back reference to where they were extracted from.
*
* output the paths representing the classes to migrate:
* classes having a _Type partner
* excluding things other than .java or .classes, and excluding anything with "$" in the name
* - the path includes the "file system".
* @param root
* @throws IOException
*/
// @formatter:on
private void getAndProcessCandidatesInContainer(Container container) {
// current_paths2RootIds = top_paths2RootIds; // don't do lower, that's called within Jars etc.
if (container.isSingleJavaSource) {
getCandidates_processFile2(container.root, container);
} else {
try (Stream<Path> stream = Files.walk(container.root, FileVisitOption.FOLLOW_LINKS)) { // needed
// to
// release
// file
// handles
stream.forEachOrdered(
// only puts into the RootIds possible Fqcn (ending in either .class or .java)
p -> getCandidates_processFile2(p, container));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// walk from root container, remove items not JCas candidates
// prunes empty rootIds and subContainer nodes
removeNonJCas(container);
if (container.candidates.size() == 0 && container.subContainers.size() == 0) { // above call
// might remove
// all candidates
Container parent = container.parent;
if (parent != null) {
// System.out.println("No Candidates found, removing container: " + container.toString() );
// // debug
// System.out.println("debug: " + container.rootOrig.toString());
parent.subContainers.remove(container);
}
return;
}
si(psb).append("Migrating JCas files ");
psb.append(container.isJar ? "in Jar: " : container.isPear ? "in Pear: " : "from root: ");
psb.append(container.rootOrig);
indent[0] += 2;
si(psb);
flush(psb);
try {
for (Path path : container.candidates) {
CommonConverted cc = getSource(path, container);
// migrate checks to see if already done, outputs a "." or some other char for the candidate
migrate(cc, container, path);
// defer any compilation to container level
if ((itemCount % 50) == 0) {
psb.append(" ").append(itemCount);
si(psb);
flush(psb);
}
itemCount++;
}
psb.append(" ").append(itemCount - 1);
flush(psb);
if (isSource) {
return; // done
}
if (!isSource && !container.haveDifferentCapitalizedNamesCollidingOnWindows
&& javaCompiler != null) {
boolean somethingCompiled = compileV3SourcesCommon2(container);
if (container.isPear || container.isJar) {
if (somethingCompiled) {
postProcessPearOrJar(container);
}
}
return;
}
unableToCompile = true;
return; // unable to do post processing or compiling
} finally {
indent[0] -= 2;
}
}
// removes nonJCas candidates
private void removeNonJCas(Container container) {
Iterator<Path> it = container.candidates.iterator();
while (it.hasNext()) {
String candidate = it.next().toString();
// remove non JCas classes
// //debug
// System.out.println("debug, testing to remove: " + candidate);
// if (candidate.indexOf("Corrected") >= 0) {
// if (!container._Types.contains(candidate)) {
// System.out.println("debug dumping _Types map keys to see why ... Corrected.class not
// there");
// System.out.println("debug key is=" + candidate);
// System.out.println("keys are:");
// int i = 0;
// for (String k : container._Types) {
// if (i == 4) {
// i = 0;
// System.out.println("");
// }
// System.out.print(k + ", ");
// }
// } else {
// System.out.println("debug container._Types did contain " + candidate);
// }
// }
if (!container.isSingleJavaSource && !container._Types.contains(candidate)) {
it.remove();
}
}
}
// @formatter:off
/**
* Called from Stream walker starting at a root or starting at an embedded Jar or Pear.
*
* adds all the .java or .class files to the candidates, including _Type if not skipping the _Type check
*
* Handling embedded jar files
* - single level Jar (at the top level of the default file system)
* -- handle using an overlayed file system
* - embedded Jars within Jars:
* - not supported by Zip File System Provider (it only supports one level)
* - handle by extracting to a temp dir, and then using the Zip File System Provider
*
* For PEARs, check for and disallow nested PEARs; install the PEAR, set the pear classpath for
* recursive processing with the Pear.
*
* For Jar and PEAR files, use local variable + recursive call to update current_paths2RootIds map
* to a new one for the Jar / Pear, and then process recursively
*
* @param path the path to a .java or .class or .jar or .pear that was walked to
* @param container the container for the
*   - rootIds (which have the JCas candidates) and
*   - subContainers for embedded Pears and Jars
*/
// @formatter:on
private void getCandidates_processFile2(Path path, Container container) {
String pathString = path.toString();
final boolean isPear = pathString.endsWith(".pear"); // path.endsWith does not mean this !!
final boolean isJar = pathString.endsWith(".jar");
if (isPear || isJar) {
Container subc = new Container(container, path);
getAndProcessCandidatesInContainer(subc);
return;
}
if (pathString.endsWith(isSource ? ".java" : ".class")) {
// Skip candidates except .java or .class
addToCandidates(path, container);
}
}
// @formatter:off
/**
* if _Type kind, add artifactId to set kept in current rootIdContainer
* If currently scanning within a PEAR,
* record 2-way map from unzipped path to internal path inside pear
* Used when doing pear reassembly.
*
* If currently scanning within a Jar or a PEAR,
* add unzipped path to list of all subparts for containing Jar or PEAR
* These paths are used as unique ids to things needing to be replaced in the Jar or PEAR,
* when doing re-assembly. This applies to compiled-classes migration only, since source
* migration doesn't do re-assembly.
*
* @param path
* @param container
*/
// @formatter:on
private void addToCandidates(Path path, Container container) {
String ps = path.toString();
if (ps.endsWith(isSource ? "_Type.java" : "_Type.class")) {
container._Types.add(isSource ? (ps.substring(0, ps.length() - 10) + ".java")
: (ps.substring(0, ps.length() - 11) + ".class"));
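// e.g. seeing .../Foo_Type.java (or _Type.class) records .../Foo.java (or .class) as having
// a _Type partner; removeNonJCas later keeps only candidates present in this _Types set.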
// if (container.isJar) {
// System.out.println("debug add container._Types " + Paths.get(ps.substring(0, ps.length() -
// 11)).toString() + ".class".toString() + " for Jar " +
// container.rootOrig.getFileName().toString());
// }
return;
}
if (ps.contains("$")) {
return; // don't add these kinds of things, they're not JCas classes
}
// debug
// if (container.isJar) {
// System.out.println("debug add candidate " + path.toString() + " for Jar " +
// container.rootOrig.getFileName().toString());
// }
container.candidates.add(path);
}
/**
* For Jars inside other Jars, we copy the Jar to a temp spot in the default file system Extracted
* Jar is marked delete-on-exit
*
* @param path
* embedded Jar to copy (only the last name is used, in constructing the temp dir)
* @return a temporary file in the local temp directory that is a copy of the Jar
* @throws IOException
* -
*/
private static Path getTempOutputPathForJarOrPear(Path path) throws IOException {
Path localTempDir = getTempDir();
if (path == null) {
throw new IllegalArgumentException();
}
Path fn = path.getFileName();
if (fn == null) {
throw new IllegalArgumentException();
}
Path tempPath = Files.createTempFile(localTempDir, fn.toString(), "");
tempPath.toFile().deleteOnExit();
return tempPath;
}
private static Path getTempDir() throws IOException {
if (tempDir == null) {
tempDir = Files.createTempDirectory("migrateJCas");
tempDir.toFile().deleteOnExit();
}
return tempDir;
}
private static final CommandLineParser createCmdLineParser() {
CommandLineParser parser = new CommandLineParser();
parser.addParameter(SOURCE_FILE_ROOTS, true);
parser.addParameter(CLASS_FILE_ROOTS, true);
parser.addParameter(OUTPUT_DIRECTORY, true);
// parser.addParameter(SKIP_TYPE_CHECK, false);
parser.addParameter(MIGRATE_CLASSPATH, true);
// parser.addParameter(CLASSES, true);
return parser;
}
private final boolean checkCmdLineSyntax(CommandLineParser clp) {
if (clp.getRestArgs().length > 0) {
System.err.println("Error parsing CVD command line: unknown argument(s):");
String[] args = clp.getRestArgs();
for (int i = 0; i < args.length; i++) {
System.err.print(" ");
System.err.print(args[i]);
}
System.err.println();
return false;
}
if (!clp.isInArgsList(SOURCE_FILE_ROOTS) && !clp.isInArgsList(CLASS_FILE_ROOTS)) {
System.err.println(
"Neither sources file roots nor classes file roots parameters specified; please specify just one.");
return false;
}
if (clp.isInArgsList(SOURCE_FILE_ROOTS) && clp.isInArgsList(CLASS_FILE_ROOTS)) {
System.err.println(
"both sources file roots and classes file roots parameters specified; please specify just one.");
return false;
}
if (clp.isInArgsList(OUTPUT_DIRECTORY)) {
outputDirectory = Paths.get(clp.getParamArgument(OUTPUT_DIRECTORY)).toString();
if (!outputDirectory.endsWith("/")) {
outputDirectory = outputDirectory + "/";
}
} else {
try {
outputDirectory = Files.createTempDirectory("migrateJCasOutput").toString() + "/";
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
outDirConverted = outputDirectory + "converted/";
outDirSkipped = outputDirectory + "not-converted/";
outDirLog = outputDirectory + "logs/";
if (clp.isInArgsList(MIGRATE_CLASSPATH)) {
migrateClasspath = clp.getParamArgument(MIGRATE_CLASSPATH);
} else {
if (clp.isInArgsList(CLASS_FILE_ROOTS)) {
System.err.println("WARNING: classes file roots is specified, but the\n"
+ " migrateClasspath parameter is missing\n");
}
}
// if (clp.isInArgsList(CLASSES)) {
// individualClasses = clp.getParamArgument(CLASSES);
// }
return true;
}
// called to decompile a string of bytes.
// - first get the class name (fully qualified)
// and skip decompiling if already decompiled this class
// for this pearClasspath
// - this handles multiple class definitions, ensuring
// only one decompile happens per pearClasspath (including null)
/**
* Caller does any caching to avoid this method.
*
* @param b
* bytecode to decompile
* @param pearClasspath
* to prepend to the classpath
* @return the decompiled source as a string; sets badClassName if the class name could not be determined
*/
private String decompile(byte[] b, String pearClasspath) {
badClassName = false;
String classNameWithSlashes = Misc.classNameFromByteCode(b);
packageAndClassNameSlash = classNameWithSlashes;
ClassLoader cl = getClassLoader(pearClasspath);
UimaDecompiler ud = new UimaDecompiler(cl, null);
if (classNameWithSlashes == null || classNameWithSlashes.length() < 2) {
System.err.println("Failed to extract class name from binary code, " + "name found was \""
+ ((classNameWithSlashes == null) ? "null" : classNameWithSlashes)
+ "\"\n byte array was:");
System.err.println(Misc.dumpByteArray(b, 2000));
badClassName = true;
}
return ud.decompileToString(classNameWithSlashes, b);
}
/**
* The classloader to use in decompiling, if it is provided, is one that delegates first to the
* parent. This may need fixing for PEARs
*
* @return classloader to use for migrate decompiling
*/
private ClassLoader getClassLoader(String pearClasspath) {
if (null == pearClasspath) {
if (null == cachedMigrateClassLoader) {
cachedMigrateClassLoader = (null == migrateClasspath) ? this.getClass().getClassLoader()
: new UIMAClassLoader(Misc.classpath2urls(migrateClasspath));
}
return cachedMigrateClassLoader;
} else {
try {
return new UIMAClassLoader((null == migrateClasspath) ? pearClasspath
: (pearClasspath + File.pathSeparator + migrateClasspath));
} catch (MalformedURLException e) {
throw new UIMARuntimeException(e);
}
}
}
private void addImport(String s) {
cu.getImports().add(new ImportDeclaration(new Name(s), false, false));
}
private void removeImport(String s) {
Iterator<ImportDeclaration> it = cu.getImports().iterator();
while (it.hasNext()) {
ImportDeclaration impDcl = it.next();
if (impDcl.getNameAsString().equals(s)) {
it.remove();
break;
}
}
}
/******************
* AST Utilities
******************/
private Node replaceInParent(Node n, Expression v) {
Optional<Node> maybeParent = n.getParentNode();
if (maybeParent.isPresent()) {
Node parent = n.getParentNode().get();
if (parent instanceof EnclosedExpr) {
((EnclosedExpr) parent).setInner(v);
} else if (parent instanceof MethodCallExpr) { // args in the arg list
List<Expression> args = ((MethodCallExpr) parent).getArguments();
args.set(args.indexOf(n), v);
v.setParentNode(parent);
} else if (parent instanceof ExpressionStmt) {
((ExpressionStmt) parent).setExpression(v);
} else if (parent instanceof CastExpr) {
((CastExpr) parent).setExpression(v);
} else if (parent instanceof ReturnStmt) {
((ReturnStmt) parent).setExpression(v);
} else if (parent instanceof AssignExpr) {
((AssignExpr) parent).setValue(v);
} else if (parent instanceof VariableDeclarator) {
((VariableDeclarator) parent).setInitializer(v);
} else if (parent instanceof ObjectCreationExpr) {
List<Expression> args = ((ObjectCreationExpr) parent).getArguments();
int i = args.indexOf(n);
if (i < 0)
throw new RuntimeException();
args.set(i, v);
} else {
System.out.println(parent.getClass().getName());
throw new RuntimeException();
}
return v;
}
System.out.println(
"internal error replacing in parent: no parent for node: " + n.getClass().getName());
System.out.println(" node: " + n.toString());
System.out.println(" expression replacing: " + v.toString());
throw new RuntimeException();
}
/**
*
* @param ps
* the parameter list
* @param i
* the index of the parameter to modify
* @param t
* the name of the class or interface type
* @param name
* the name of the variable
*/
private void setParameter(List<Parameter> ps, int i, String t, String name) {
Parameter p = ps.get(i);
p.setType(new ClassOrInterfaceType(t));
p.setName(new SimpleName(name));
}
private int findConstructor(NodeList<BodyDeclaration<?>> classMembers) {
int i = 0;
for (BodyDeclaration<?> bd : classMembers) {
if (bd instanceof ConstructorDeclaration) {
return i;
}
i++;
}
return -1;
}
private boolean hasTypeFields(NodeList<BodyDeclaration<?>> members) {
boolean hasType = false;
boolean hasTypeId = false;
for (BodyDeclaration<?> bd : members) {
if (bd instanceof FieldDeclaration) {
FieldDeclaration f = (FieldDeclaration) bd;
EnumSet<Modifier> m = f.getModifiers();
if (m.contains(Modifier.PUBLIC) && m.contains(Modifier.STATIC) && m.contains(Modifier.FINAL)
// &&
// getTypeName(f.getType()).equals("int")
) {
List<VariableDeclarator> vds = f.getVariables();
for (VariableDeclarator vd : vds) {
if (vd.getType().equals(intType)) {
String n = vd.getNameAsString();
if (n.equals("type"))
hasType = true;
if (n.equals("typeIndexID"))
hasTypeId = true;
if (hasTypeId && hasType) {
return true;
}
}
}
}
}
} // end of for
return false;
}
// @formatter:off
/**
* Heuristic:
* JCas classes have 0, 1, and 2 arg constructors with particular arg types
* 0 -
* 1 - JCas
* 2 - int, TOP_Type (v2) or TypeImpl, CASImpl (v3)
*
* Additional 1 and 2 arg constructors are permitted.
*
* Sets fields hasV2Constructors, hasV3Constructors
*
* @param members
*/
// @formatter:on
private void setHasJCasConstructors(NodeList<BodyDeclaration<?>> members) {
boolean has0ArgConstructor = false;
boolean has1ArgJCasConstructor = false;
boolean has2ArgJCasConstructorV2 = false;
boolean has2ArgJCasConstructorV3 = false;
for (BodyDeclaration<?> bd : members) {
if (bd instanceof ConstructorDeclaration) {
List<Parameter> ps = ((ConstructorDeclaration) bd).getParameters();
if (ps.size() == 0)
has0ArgConstructor = true;
if (ps.size() == 1 && getParmTypeName(ps, 0).equals("JCas")) {
has1ArgJCasConstructor = true;
}
if (ps.size() == 2) {
if (getParmTypeName(ps, 0).equals("int") && getParmTypeName(ps, 1).equals("TOP_Type")) {
has2ArgJCasConstructorV2 = true;
} else if (getParmTypeName(ps, 0).equals("TypeImpl")
&& getParmTypeName(ps, 1).equals("CASImpl")) {
has2ArgJCasConstructorV3 = true;
}
} // end of 2 arg constructor
} // end of is-constructor
} // end of for loop
hasV2Constructors = has0ArgConstructor && has1ArgJCasConstructor && has2ArgJCasConstructorV2;
hasV3Constructors = has0ArgConstructor && has1ArgJCasConstructor && has2ArgJCasConstructorV3;
}
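  // Illustrative sketch (not in the original source): the constructor shapes recognized above, for a
  // hypothetical type MyType, are
  //     public MyType() {}                              // 0-arg (v2 and v3)
  //     public MyType(JCas jcas) { ... }                // 1-arg (v2 and v3)
  //     public MyType(int addr, TOP_Type type) { ... }  // marks hasV2Constructors
  //     public MyType(TypeImpl t, CASImpl c) { ... }    // marks hasV3Constructors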
private String getParmTypeName(List<Parameter> p, int i) {
return getTypeName(p.get(i).getType());
}
private String getTypeName(Type t) {
// if (t instanceof ReferenceType) {
// t = ((ReferenceType<?>)t).getType();
// }
if (t instanceof PrimitiveType) {
return ((PrimitiveType) t).toString();
}
if (t instanceof ClassOrInterfaceType) {
return ((ClassOrInterfaceType) t).getNameAsString();
}
Misc.internalError();
return null;
}
/**
* Get the name of a field
*
* @param e
* -
* @return the field name or null
*/
private String getName(Expression e) {
e = getUnenclosedExpr(e);
if (e instanceof NameExpr) {
return ((NameExpr) e).getNameAsString();
}
if (e instanceof FieldAccessExpr) {
return ((FieldAccessExpr) e).getNameAsString();
}
return null;
}
/**
   * Called on annotation, class/interface, empty-type, and enum declarations. Does nothing unless
   * the declaration is at the top level of the compilation unit.
   *
   * Otherwise, adds an entry to c2ps for the classname and package, plus the full path.
*
* @param n
* type being declared
*/
private void updateClassName(TypeDeclaration<?> n) {
Optional<Node> pnode = n.getParentNode();
Node node;
if (pnode.isPresent() && (node = pnode.get()) instanceof CompilationUnit) {
CompilationUnit cu2 = (CompilationUnit) node;
className = cu2.getType(0).getNameAsString();
String packageAndClassName = (className.contains(".")) ? className
: packageName + '.' + className;
packageAndClassNameSlash = packageAndClassName.replace('.', '/');
// assert current_cc.fqcn_slash == null; // for decompiling, already set
assert (current_cc.fqcn_slash != null)
? current_cc.fqcn_slash.equals(packageAndClassNameSlash)
: true;
current_cc.fqcn_slash = packageAndClassNameSlash;
TypeImpl ti = TypeSystemImpl.staticTsi
.getType(Misc.javaClassName2UimaTypeName(packageAndClassName));
if (null != ti) {
// is a built-in type
// ContainerAndPath p = new ContainerAndPath(
// current_path,
// current_container,packageAndClassNameSlash,
// current_cc.,
// current_cc.pearClasspath);
skippedBuiltins
.add(new PathContainerAndReason(current_path, current_container, "built-in"));
isBuiltinJCas = true;
isConvert2v3 = false;
return;
} else {
VariableDeclarator vd_typename = new VariableDeclarator(stringType, "_TypeName",
new StringLiteralExpr(packageAndClassName));
fi_fields.add(new FieldDeclaration(public_static_final, vd_typename));
}
return;
}
return;
}
private Expression getExpressionFromStmt(Statement stmt) {
stmt = getStmtFromStmt(stmt);
if (stmt instanceof ExpressionStmt) {
return getUnenclosedExpr(((ExpressionStmt) stmt).getExpression());
}
return null;
}
private Expression getUnenclosedExpr(Expression e) {
while (e instanceof EnclosedExpr) {
e = ((EnclosedExpr) e).getInner().get();
}
return e;
}
  /**
   * Unwrap (possibly nested) single-statement blocks.
   *
   * @param stmt
   *          -
   * @return the unwrapped (non-block) statement, or null if a block has more than one statement
   */
private Statement getStmtFromStmt(Statement stmt) {
while (stmt instanceof BlockStmt) {
NodeList<Statement> stmts = ((BlockStmt) stmt).getStatements();
if (stmts.size() == 1) {
stmt = stmts.get(0);
continue;
}
return null;
}
return stmt;
}
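  // Illustrative sketch (not in the original source): for a body such as
  //     { { return x; } }
  // getStmtFromStmt returns the inner "return x;" statement; a block containing more than one
  // statement yields null, which callers can treat as "not the simple one-statement shape".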
private void addCastExpr(Statement stmt, Type castType) {
ReturnStmt rstmt = (ReturnStmt) stmt;
Optional<Expression> o_expr = rstmt.getExpression();
Expression expr = o_expr.isPresent() ? o_expr.get() : null;
CastExpr ce = new CastExpr(castType, expr);
rstmt.setExpression(ce); // removes the parent link from expr
if (expr != null) {
expr.setParentNode(ce); // restore it
}
}
/********************
* Recording results
********************/
private void recordBadConstructor(String msg) {
reportMigrateFailed("Constructor is incorrect, " + msg);
}
// private void reportParseException() {
// reportMigrateFailed("Unparsable Java");
// }
private void migrationFailed(String reason) {
failedMigration.add(new PathContainerAndReason(current_path, current_container, reason));
isConvert2v3 = false;
}
private void reportMigrateFailed(String m) {
System.out.format("Skipping this file due to error: %s, path: %s%n", m, current_path);
migrationFailed(m);
}
private void reportV2Class() {
// v2JCasFiles.add(current_path);
isV2JCas = true;
}
private void reportV3Class() {
// v3JCasFiles.add(current_path);
isConvert2v3 = true;
}
private void reportNotJCasClass(String reason) {
nonJCasFiles.add(new PathContainerAndReason(current_path, current_container, reason));
isConvert2v3 = false;
}
private void reportNotJCasClassMissingTypeFields() {
reportNotJCasClass("missing required type and/or typeIndexID static fields");
}
private void reportDeletedCheckModified(String m) {
deletedCheckModified.add(new PathContainerAndReason(current_path, current_container, m));
}
private void reportMismatchedFeatureName(String m) {
manualInspection.add(new PathContainerAndReason(current_path, current_container,
"This getter/setter name doesn't match internal feature name: " + m));
}
private void reportUnrecognizedV2Code(String m) {
migrationFailed("V2 code not recognized:\n" + m);
}
private void reportPathWorkaround(String orig, String modified) {
pathWorkaround.add(new String1AndString2(orig, modified));
}
  // pears or jars
  private void reportPearOrJarClassReplace(String pearOrJar, String classname, Container kind) {
    if (kind.isPear) {
pearClassReplace.add(new String1AndString2(pearOrJar, classname));
} else {
jarClassReplace.add(new String1AndString2(pearOrJar, classname));
}
}
/***********************************************/
  /**
   * Output directory for source and migrated files, consisting of converted/skipped, v2/v3,
   * a+cc.id, and the slashified classname.
*
* @param cc
* -
* @param isV2
* -
* @param wasConverted
* -
* @return converted/skipped, v2/v3, a+cc.id, slashified classname
*/
private String getBaseOutputPath(CommonConverted cc, boolean isV2, boolean wasConverted) {
StringBuilder sb = new StringBuilder();
sb.append(wasConverted ? outDirConverted : outDirSkipped);
sb.append(isV2 ? "v2/" : "v3/");
sb.append("a").append(cc.getId()).append('/');
sb.append(cc.fqcn_slash).append(".java");
return sb.toString();
}
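  // Illustrative sketch (not in the original source): for a converted class org.example.MyType
  // (a hypothetical name) whose CommonConverted id is 0, this produces
  //     <outDirConverted>v3/a0/org/example/MyType.java
  // while the corresponding saved v2 original (see writeV2Orig) goes to
  //     <outDirConverted>v2/a0/org/example/MyType.java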
private void writeV2Orig(CommonConverted cc, boolean wasConverted) throws IOException {
String base = getBaseOutputPath(cc, true, wasConverted); // adds numeric suffix if dupls
FileUtils.writeToFile(makePath(base), cc.v2Source);
}
private void writeV3(CommonConverted cc) throws IOException {
String base = getBaseOutputPath(cc, false, true);
cc.v3SourcePath = makePath(base);
String data = fixImplementsBug(cc.v3Source);
FileUtils.writeToFile(cc.v3SourcePath, data);
}
private void printUsage() {
System.out.println("Usage: java org.apache.uima.migratev3.jcas.MigrateJCas \n"
+ " [-sourcesRoots <One-or-more-directories-or-jars-separated-by-Path-separator, or a path to a single JCas source class>]\n"
+ " [-classesRoots <One-or-more-directories-or-jars-or-pears-separated-by-Path-separator>]\n"
+ " [-outputDirectory a-writable-directory-path (optional)\n"
+ " if omitted, a temporary directory is used\n"
+ " if not omitted, the directory contents WILL BE ERASED at the start.\n"
+ " [-migrateClasspath a-class-path to use in decompiling, when -classesRoots is specified\n"
+ " also used when compiling the migrated classes.\n"
+ " NOTE: either -sourcesRoots or -classesRoots is required, but only one may be specified.\n"
+ " NOTE: classesRoots are scanned for JCas classes, which are then decompiled, and the results processed like sourcesRoots\n");
}
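  // Illustrative invocation (hypothetical paths; the class name and parameters are the ones
  // documented above):
  //   java -cp <tool-and-dependency-jars> org.apache.uima.migratev3.jcas.MigrateJCas \
  //       -classesRoots /path/to/typesystem.jar \
  //       -migrateClasspath /path/to/uimaj-core.jar \
  //       -outputDirectory /tmp/v3-migration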
private static final Pattern implementsEmpty = Pattern.compile("implements \\{");
private String fixImplementsBug(String data) {
return implementsEmpty.matcher(data).replaceAll("{");
}
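  // Illustrative sketch (not in the original source): some decompiler output contains an empty
  // implements clause, e.g.
  //     public class MyType extends TOP implements {
  // which fixImplementsBug rewrites to
  //     public class MyType extends TOP {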
/*********************************************************************
* Reporting classes
*********************************************************************/
private static abstract class Report2<T, U> {
    // public: Eclipse on Linux complained if not public; it was OK on Windows
    public abstract Comparable<T> getFirst();
public abstract Comparable<U> getSecond();
abstract int getFirstLength();
}
private static class PathContainerAndReason extends Report2<ContainerAndPath, String> {
final ContainerAndPath cap;
final String reason;
PathContainerAndReason(ContainerAndPath cap, String reason) {
this.cap = cap;
this.reason = reason;
}
PathContainerAndReason(Path path, Container container, String reason) {
this(new ContainerAndPath(path, container), reason);
}
@Override
public Comparable<ContainerAndPath> getFirst() {
return cap;
}
@Override
public Comparable<String> getSecond() {
return reason;
}
@Override
int getFirstLength() {
return cap.toString().length();
}
}
private static class String1AndString2 extends Report2<String, String> {
String s1;
String s2;
String1AndString2(String s1, String s2) {
this.s1 = s1;
this.s2 = s2;
}
@Override
public Comparable<String> getFirst() {
return s1;
}
@Override
public Comparable<String> getSecond() {
return s2;
}
@Override
int getFirstLength() {
return s1.toString().length();
}
}
private static void withIOX(Runnable_withException r) {
try {
r.run();
} catch (Exception e) {
throw new UIMARuntimeException(e);
}
}
private int findFirstCharDifferent(String s1, String s2) {
int s1l = s1.length();
int s2l = s2.length();
for (int i = 0;; i++) {
if (i == s1l || i == s2l) {
return i;
}
if (s1.charAt(i) != s2.charAt(i)) {
return i;
}
}
}
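  // Illustrative sketch (not in the original source):
  //     findFirstCharDifferent("converted/v3/a0/Foo.java", "converted/v3/a0/Bar.java")  // -> 16
  // The returned index is used to elide the common prefix when printing runs of similar paths.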
// private String drop_Type(String s) {
// return s.substring(0, isSource ? "_Type.java".length()
// : "_Type.class".length()) +
// (isSource ? ".java" : ".class");
// }
/// *****************
// * Root-id
// *****************/
// private static int nextRootId = 1;
//
/// ***********************************************************************
// * Root-id - this is the path part up to the start of the package name.
// * - it is relative to container
// * - has the collection of artifacts that might be candidates, having this rootId
// * - has the collection of _Type things having this rootId
// * - "null" path is OK - means package name starts immediately
// * There is no Root-id for path ending in Jar or PEAR - these created containers instead
// ***********************************************************************/
// private static class RootId {
// final int id = nextRootId++;
// /**
  // * The path relative to the container (if any) (= Jar or Pear)
// * - for Pears, the path is as if it was not installed, but within the PEAR file
// */
// final Path path;
//
// /** The container holding this RootId */
// final Container container;
// /**
// * For this rootId, all of the fully qualified classnames that are migration eligible.
// * - not all might be migrated, if upon further inspection they are not JCas class files.
// */
// final Set<Fqcn> fqcns = new HashSet<>();
// final Set<String> fqcns_ignore_case = new HashSet<>();
// boolean haveDifferentCapitalizedNamesCollidingOnWindows = false;
//
// RootId(Path path, Container container) {
// this.path = path;
// this.container = container;
// }
//
// /* (non-Javadoc)
// * @see java.lang.Object#toString()
// */
// @Override
// public String toString() {
// return "RootId [id="
// + id
// + ", path="
// + path
// + ", container="
// + container.id
// + ", fqcns="
// + Misc.ppList(Misc.setAsList(fqcns))
// + ", fqcns_Type="
// + Misc.ppList(Misc.setAsList(fqcns_Type))
// + "]";
// }
//
// void add(Fqcn fqcn) {
// boolean wasNotPresent = fqcns.add(fqcn);
// boolean lc = fqcns_ignore_case.add(fqcn.fqcn_dots.toLowerCase());
// if (!lc && wasNotPresent) {
// haveDifferentCapitalizedNamesCollidingOnWindows = true;
// }
// }
//
// boolean hasMatching_Type(Fqcn fqcn) {
//
// }
// }
/// **
// * Called from Stream walker starting at a root or starting at an imbedded Jar or Pear.
// *
// * adds all the .java or .class files to the candidates, including _Type if not skipping the
/// _Type check
// * Handling embedded jar files
// * - single level Jar (at the top level of the default file system)
// * -- handle using an overlayed file system
// * - embedded Jars within Jars:
// * - not supported by Zip File System Provider (it only supports one level)
// * - handle by extracting to a temp dir, and then using the Zip File System Provider
// * @param path the path to a .java or .class or .jar or .pear
// * @param pearClasspath - a string representing a path to the pear's classpath if there is one,
/// or null
// */
// private void getCandidates_processFile(Path path, String pearClasspath) {
//// if (path.toString().contains("commons-httpclient-3.1.jar"))
//// System.out.println("Debug: " + path.toString());
//// System.out.println("debug processing " + path);
// try {
//// URI pathUri = path.toUri();
// String pathString = path.toString();
// final boolean isPear = pathString.endsWith(".pear"); // path.endsWith does not mean this !!
// final boolean isJar = pathString.endsWith(".jar");
//
// if (isJar || isPear) {
// if (!path.getFileSystem().equals(FileSystems.getDefault())) {
// // embedded Pear or Jar: extract to temp
// Path out = getTempOutputPathForJar(path);
// Files.copy(path, out, StandardCopyOption.REPLACE_EXISTING);
//// embeddedJars.add(new PathAndPath(path, out));
// path = out; // path points to pear or jar
// }
//
// Path start;
// final String localPearClasspath;
// if (isPear) {
// if (pearClasspath != null) {
// throw new UIMARuntimeException("Nested PEAR files not supported");
// }
//
//// pear_current = new PearOrJar(path);
//// pears.add(pear_current);
// // add pear classpath info
// File pearInstallDir = Files.createTempDirectory(getTempDir(), "installedPear").toFile();
// PackageBrowser ip = PackageInstaller.installPackage(pearInstallDir, path.toFile(), false);
// localPearClasspath = ip.buildComponentClassPath();
// String[] children = pearInstallDir.list();
// if (children == null || children.length != 1) {
// Misc.internalError();
// }
// pearResolveStart = Paths.get(pearInstallDir.getAbsolutePath(), children[0]);
//
// start = pearInstallDir.toPath();
// } else {
// if (isJar) {
// PearOrJar jarInfo = new PearOrJar(path);
// pear_or_jar_current_stack.push(jarInfo);
// jars.add(jarInfo);
// }
//
// localPearClasspath = pearClasspath;
// FileSystem jfs = FileSystems.newFileSystem(Paths.get(path.toUri()), null);
// start = jfs.getPath("/");
// }
//
// try (Stream<Path> stream = Files.walk(start)) { // needed to release file handles
// stream.forEachOrdered(
// p -> getCandidates_processFile(p, localPearClasspath));
// }
// if (isJar) {
// pear_or_jar_current_stack.pop();
// }
// if (isPear) {
// pear_current = null;
// }
// } else {
// // is not a .jar or .pear file. add .java or .class files to initial candidate set
// // will be filtered additionally later
//// System.out.println("debug path ends with java or class " + pathString.endsWith(isSource ?
/// ".java" : ".class") + " " + pathString);
// if (pathString.endsWith(isSource ? ".java" : ".class")) {
// candidates.add(new Candidate(path, pearClasspath));
// if (!isSource && null != pear_current) {
// // inside a pear, which has been unzipped into pearInstallDir;
// path2InsidePearOrJarPath.put(path.toString(), pearResolveStart.relativize(path).toString());
// pear_current.pathsToCandidateFiles.add(path.toString());
// }
//
// if (!isSource && pear_or_jar_current_stack.size() > 0) {
// // inside a jar, not contained in a pear
// pear_or_jar_current_stack.getFirst().pathsToCandidateFiles.add(path.toString());
// }
// }
// }
// } catch (IOException e) {
// throw new RuntimeException(e);
// }
// }
// private void postProcessPearsOrJars(String kind, List<PearOrJar> pearsOrJars,
/// List<String1AndString2> classReplace) { // pears or jars
// try {
// Path outDir = Paths.get(outputDirectory, kind);
// FileUtils.deleteRecursive(outDir.toFile());
// Files.createDirectories(outDir);
// } catch (IOException e) {
// throw new RuntimeException(e);
// }
//
//// pearsOrJars may have entries with 0 candidate paths. This happens when we scan them
//// but find nothing to convert.
//// eliminate these.
//
// Iterator<PearOrJar> it = pearsOrJars.iterator();
// while (it.hasNext()) {
// PearOrJar poj = it.next();
// if (poj.pathsToCandidateFiles.size() == 0) {
// it.remove();
// } else {
//// //debug
//// if (poj.pathToPearOrJar.toString().contains("commons-httpclient-3.1")) {
//// System.err.println("debug found converted things inside commons-httpclient");;
//// for (String x : poj.pathsToCandidateFiles) {
//// System.err.println(x);
//// }
//// System.err.println("");
//// }
// }
// }
//
// it = pearsOrJars.iterator();
// while (it.hasNext()) {
// PearOrJar poj = it.next();
// if (poj.pathsToCandidateFiles.size() == 0) {
// System.err.print("debug failed to remove unconverted Jar");
// }
// }
//
// if (pearsOrJars.size() == 0) {
// System.out.format("No .class files were replaced in %s.%n", kind);
// } else {
// System.out.format("replacing .class files in %,d %s%n", pearsOrJars.size(), kind);
// for (PearOrJar p : pearsOrJars) {
// pearOrJarPostProcessing(p, kind);
// }
// try {
// reportPaths("Reports of updated " + kind, kind + "FileUpdates.txt", classReplace);
//
// } catch (IOException e) {
// throw new RuntimeException(e);
// }
// }
//
// }
/// **
// * When running the compiler to compile v3 sources, we need a classpath that at a minimum
// * includes uimaj-core. The strategy is to use the invoker of this tool's classpath as
// * specified from the application class loader
// * @return true if no errors
// */
// private boolean compileV3SourcesCommon(List<ClassnameAndPath> items, String msg, String
/// pearClasspath) {
//
// if (items.size() == 0) {
// return true;
// }
// System.out.format("Compiling %,d classes %s -- This may take a while!%n", c2ps.size(), msg);
// StandardJavaFileManager fileManager = javaCompiler.getStandardFileManager(null, null,
/// Charset.forName("UTF-8"));
//
// List<String> cus = items.stream()
// .map(c -> outDirConverted + "v3/" + c.classname + ".java")
// .collect(Collectors.toList());
//
// Iterable<String> compilationUnitStrings = cus;
//
// Iterable<? extends JavaFileObject> compilationUnits =
// fileManager.getJavaFileObjectsFromStrings(compilationUnitStrings);
//
// // specify where the output classes go
// String classesBaseDir = outDirConverted + "v3-classes";
// try {
// Files.createDirectories(Paths.get(classesBaseDir));
// } catch (IOException e) {
// throw new UIMARuntimeException(e);
// }
// // specify the classpath
// String classpath = getCompileClassPath(pearClasspath);
// Iterable<String> options = Arrays.asList("-d", classesBaseDir,
// "-classpath", classpath);
// return javaCompiler.getTask(null, fileManager, null, options, null, compilationUnits).call();
// }
/// **
// * Called after class is migrated
// * Given a path to a class (source or class file),
// * return the URL to the class as found in the classpath.
// * This returns the "first" one found in the classpath, in the case of duplicates.
// * @param path
// * @return the location of the class in the class path
// */
// private URL getPathForClass(Path path) {
// return (null == packageAndClassNameSlash)
// ? null
// : migrateClassLoader.getResource(packageAndClassNameSlash + ".class");
// }
// private void getBaseOutputPath() {
// String s = packageAndClassNameSlash;
// int i = 0;
// while (!usedPackageAndClassNames.add(s)) {
// i = i + 1;
// s = packageAndClassNameSlash + "_dupid_" + i;
// }
// packageAndClassNameSlash_i = i;
// }
// private String prepareIndividual(String classname) {
// candidate = new Candidate(Paths.get(classname)); // a pseudo path
// packageName = null;
// className = null;
// packageAndClassNameSlash = null;
// cu = null;
// return decompile(classname); // always look up in classpath
// // to decompile individual source - put in sourcesRoots
// }
// if (!isSource) { // skip this recording if source
// if (null != pear_current) {
// // inside a pear, which has been unzipped into a temporary pearInstallDir;
// // we don't want that temporary dir to be part of the path.
// path2InsidePearOrJarPath.put(path.toString(), pearResolveStart.relativize(path).toString());
// pear_current.pathsToCandidateFiles.add(path.toString());
// }
//
// if (!isSource && pear_or_jar_current_stack.size() > 0) {
// // inside a jar, not contained in a pear
// pear_or_jar_current_stack.getFirst().pathsToCandidateFiles.add(path.toString());
// }
// }
// }
/// **
// * For a given candidate, use its path:
// * switch the ...java to ..._Type.java, or ...class to ..._Type.class
// * look thru all the candidates
// * @param cand
// * @param start
// * @return
// */
// private boolean has_Type(Candidate cand, int start) {
// if (start >= candidates.size()) {
// return false;
// }
//
// String sc = cand.p.toString();
// String sc_minus_suffix = sc.substring(0, sc.length() - ( isSource ? ".java".length() :
/// ".class".length()));
// String sc_Type = sc_minus_suffix + ( isSource ? "_Type.java" : "_Type.class");
// // a string which sorts beyond the candidate + a suffix of "_"
// String s_end = sc_minus_suffix + (char) (((int)'_') + 1);
// for (Candidate c : candidates.subList(start, candidates.size())) {
// String s = c.p.toString();
// if (s_end.compareTo(s) < 0) {
// return false; // not found, we're already beyond where it would be found
// }
// if (s.equals(sc_Type)) {
// return true;
// }
// }
// return false;
// }
// private final static Comparator<Candidate> pathComparator = new Comparator<Candidate>() {
// @Override
// public int compare(Candidate o1, Candidate o2) {
// return o1.p.toString().compareTo(o2.p.toString());
// }
// };
//// there may be several same-name roots not quite right
//// xxx_Type$1.class
//
// private void addIfPreviousIsSameName(List<Path> c, int i) {
// if (i == 0) return;
// String _Type = candidates.get(i).toString();
//// String prev = r.get(i-1).getPath();
// String prefix = _Type.substring(0, _Type.length() - ("_Type." + (isSource ? "java" :
//// "class")).length());
// i--;
// while (i >= 0) {
// String s = candidates.get(i).toString();
// if ( ! s.startsWith(prefix)) {
// break;
// }
// if (s.substring(prefix.length()).equals((isSource ? ".java" : ".class"))) {
// c.add(candidates.get(i));
// break;
// }
// i--;
// }
// }
//
// for (int i = 0; i < pearOrJar.pathsToCandidateFiles.size(); i++) {
// String candidatePath = pearOrJar.pathsToCandidateFiles.get(i);
// String path_in_v3_classes = isPear
// ? getPath_in_v3_classes(candidatePath)
// : candidatePath;
//
// Path src = Paths.get(outputDirectory, "converted/v3-classes", path_in_v3_classes
// + (isPear ? ".class" : ""));
// Path tgt = pfs.getPath(
// "/",
// isPear
// ? path2InsidePearOrJarPath.get(candidatePath) // needs to be bin/org/... etc
// : candidatePath); // needs to be org/... etc
// if (Files.exists(src)) {
// Files.copy(src, tgt, StandardCopyOption.REPLACE_EXISTING);
// reportPearOrJarClassReplace(pearOrJarCopy.toString(), path_in_v3_classes, kind);
// }
// }
/// ** for compiled mode, do recompiling and reassembly of Jars and Pears */
//
// private boolean compileAndReassemble(CommonConverted cc, Container container, Path path) {
// boolean noErrors = true;
// if (javaCompiler != null) {
// if (container.haveDifferentCapitalizedNamesCollidingOnWindows) {
// System.out.println("Skipping compiling / reassembly because class " + container.toString() + "
/// has multiple names differing only in capitalization, please resolve first.");
// } else {
//
//
// noErrors = compileV3PearSources(container, path);
// noErrors = noErrors && compileV3NonPearSources(container, path);
//
// postProcessPearsOrJars("jars" , jars , jarClassReplace);
// postProcessPearsOrJars("pears", pears, pearClassReplace);
//
////
//// try {
//// Path pearOutDir = Paths.get(outputDirectory, "pears");
//// FileUtils.deleteRecursive(pearOutDir.toFile());
//// Files.createDirectories(pearOutDir);
//// } catch (IOException e) {
//// throw new RuntimeException(e);
//// }
////
//// System.out.format("replacing .class files in %,d PEARs%n", pears.size());
//// for (PearOrJar p : pears) {
//// pearOrJarPostProcessing(p);
//// }
//// try {
//// reportPaths("Reports of updated Pears", "pearFileUpdates.txt", pearClassReplace);
//// } catch (IOException e) {
//// throw new RuntimeException(e);
//// }
// }
// }
//
// return noErrors;
// }
/// **
// * @return true if no errors
// */
// private boolean compileV3PearSources() {
// boolean noError = true;
// Map<String, List<ClassnameAndPath>> p2c = c2ps.stream()
// .filter(c -> c.pearClasspath != null)
// .collect(Collectors.groupingBy(c -> c.pearClasspath));
//
// List<Entry<String, List<ClassnameAndPath>>> ea = p2c.entrySet().stream()
// .sorted(Comparator.comparing(Entry::getKey)) //(e1, e2) -> e1.getKey().compareTo(e2.getKey())
// .collect(Collectors.toList());
//
// for (Entry<String, List<ClassnameAndPath>> e : ea) {
// noError = noError && compileV3SourcesCommon(e.getValue(), "for Pear " + e.getKey(), e.getKey()
/// );
// }
// return noError;
// }
//
/// **
// * @return true if no errors
// */
// private boolean compileV3NonPearSources() {
//
// List<ClassnameAndPath> cnps = c2ps.stream()
// .filter(c -> c.pearClasspath == null)
// .collect(Collectors.toList());
//
// return compileV3SourcesCommon(cnps, "(non PEAR)", null);
// }
/// **
// * @param pathInPear a complete path to a class inside an (installed) pear
// * @return the part starting after the top node of the install dir
// */
// private String getPath_in_v3_classes(String pathInPear) {
// return path2classname.get(pathInPear);
// }
// private boolean reportDuplicates() throws IOException {
// List<List<CommonConverted>> nonIdenticals = new ArrayList<>();
// List<CommonConverted> onlyIdenticals = new ArrayList<>();
//
// classname2multiSources.forEach(
// (classname, ccs) -> {
// if (ccs.size() > 1) {
// nonIdenticals.add(ccs);
// } else {
// CommonConverted cc = ccs.get(0);
// if (cc.containersAndV2Paths.size() > 1)
// onlyIdenticals.add(cc); // the same item in multiple containers and/or paths
// }
// }
// );
//
// if (nonIdenticals.size() == 0) {
// if (onlyIdenticals.size() == 0) {
// System.out.println("There were no duplicates found.");
// } else {
// // report identical duplicates
// try (BufferedWriter bw = Files.newBufferedWriter(makePath(outDirLog +
// "identical_duplicates.txt"), StandardOpenOption.CREATE)) {
// logPrintNl("Report of Identical duplicates:", bw);
// for (CommonConverted cc : onlyIdenticals) {
// int i = 0;
// logPrintNl("Class: " + cc.fqcn_slash, bw);
// for (ContainerAndPath cp : cc.containersAndV2Paths) {
// logPrintNl(" " + (++i) + " " + cp, bw);
// }
// logPrintNl("", bw);
// }
// }
// }
// return true;
// }
//
//// non-identicals, print out all of them
// try (BufferedWriter bw = Files.newBufferedWriter(makePath(outDirLog +
// "nonIdentical_duplicates.txt"), StandardOpenOption.CREATE)) {
// logPrintNl("Report of non-identical duplicates", bw);
// for (List<CommonConverted> nonIdentical : nonIdenticals) {
// String fqcn = nonIdentical.get(0).fqcn_slash;
// logPrintNl(" classname: " + fqcn, bw);
// int i = 1;
// // for each cc, and within each cc, for each containerAndPath
// for (CommonConverted cc : nonIdentical) {
//// logPrintNl(" version " + i, bw);
// assert fqcn.equals(cc.fqcn_slash);
// int j = 1;
// boolean isSame = cc.containersAndV2Paths.size() > 1;
// boolean isFirstTime = true;
// for (ContainerAndPath cp : cc.containersAndV2Paths) {
// String first = isSame && isFirstTime
// ? " same: "
// : isSame
// ? " "
// : " ";
// isFirstTime = false;
// logPrintNl(first + i + "." + (j++) + " " + cp, bw);
// }
// indent[0] -= 6;
//// logPrintNl("", bw);
// i++;
// }
//// logPrintNl("", bw);
// }
// }
// return false;
// }
// private static class PathAndReason extends Report2<Path, String> {
// Path path;
// String reason;
// PathAndReason(Path path, String reason) {
// this.path = path;
// this.reason = reason;
// }
// @Override
// public Comparable<Path> getFirst() { return path; }
// @Override
// public Comparable<String> getSecond() { return reason; }
// @Override
// int getFirstLength() { return path.toString().length(); }
// }
}
| uimaj-v3migration-jcas/src/main/java/org/apache/uima/migratev3/jcas/MigrateJCas.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.migratev3.jcas;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.io.Writer;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.Charset;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import javax.tools.Diagnostic;
import javax.tools.DiagnosticCollector;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import org.apache.uima.UIMARuntimeException;
import org.apache.uima.cas.impl.TypeImpl;
import org.apache.uima.cas.impl.TypeSystemImpl;
import org.apache.uima.cas.impl.UimaDecompiler;
import org.apache.uima.internal.util.CommandLineParser;
import org.apache.uima.internal.util.Misc;
import org.apache.uima.internal.util.UIMAClassLoader;
import org.apache.uima.internal.util.function.Runnable_withException;
import org.apache.uima.pear.tools.PackageBrowser;
import org.apache.uima.pear.tools.PackageInstaller;
import org.apache.uima.util.FileUtils;
import com.github.javaparser.JavaParser;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.ImportDeclaration;
import com.github.javaparser.ast.Modifier;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.PackageDeclaration;
import com.github.javaparser.ast.body.AnnotationDeclaration;
import com.github.javaparser.ast.body.BodyDeclaration;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.ConstructorDeclaration;
import com.github.javaparser.ast.body.EnumDeclaration;
import com.github.javaparser.ast.body.FieldDeclaration;
import com.github.javaparser.ast.body.MethodDeclaration;
import com.github.javaparser.ast.body.Parameter;
import com.github.javaparser.ast.body.TypeDeclaration;
import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.comments.Comment;
import com.github.javaparser.ast.expr.AssignExpr;
import com.github.javaparser.ast.expr.BinaryExpr;
import com.github.javaparser.ast.expr.CastExpr;
import com.github.javaparser.ast.expr.EnclosedExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.FieldAccessExpr;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.Name;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.NullLiteralExpr;
import com.github.javaparser.ast.expr.ObjectCreationExpr;
import com.github.javaparser.ast.expr.SimpleName;
import com.github.javaparser.ast.expr.StringLiteralExpr;
import com.github.javaparser.ast.stmt.BlockStmt;
import com.github.javaparser.ast.stmt.EmptyStmt;
import com.github.javaparser.ast.stmt.ExplicitConstructorInvocationStmt;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import com.github.javaparser.ast.stmt.IfStmt;
import com.github.javaparser.ast.stmt.ReturnStmt;
import com.github.javaparser.ast.stmt.Statement;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
import com.github.javaparser.ast.type.PrimitiveType;
import com.github.javaparser.ast.type.Type;
import com.github.javaparser.ast.visitor.VoidVisitorAdapter;
import com.github.javaparser.printer.PrettyPrinter;
import com.github.javaparser.printer.PrettyPrinterConfiguration;
/**
* <p>A driver that scans given roots for source and/or class Java files that contain JCas classes
*
* <ul><li>identifies which ones appear to be JCas classes (heuristic)
* <ul><li>of these, identifies which ones appear to be v2
* <ul><li>converts these to v3</li></ul></li></ul>
*
* <li>also can receive a list of individual class names</li>
* <li>also can do a single source file</li>
* </ul>
*
* <p>Creates summary and detailed reports of its actions.
*
* <p>Files representing JCas classes to convert are discovered by walking file system
 * directories from various roots, specified as input. The tool operates in one of two exclusive
* "modes": migrating from sources (e.g., .java files) and migrating using compiled classes.
*
* <p>Compiled classes are decompiled and then migrated. This decompilation step usually
* requires a java classpath, which is supplied using the -migrateClasspath parameter.
* Exception: migrating PEAR files, which contain their own specification for a classpath.
*
* <p>The same JCas class may be encountered multiple
* times while walking the directory tree from the roots,
* with the same or different definition. All of these definitions are migrated.
*
* <p>Copies of the original and the converted files are put into the output file tree.
*
* <p>Directory structure, starting at -outputDirectory (which if not specified, is a new temp directory).
* The "a0", "a1" represent non-identical alternative definitions for the same class.
* <pre>
* converted/
* v2/ these are the decompiled or "found" source files
* a0/x/y/z/javapath/.../Classname.java root-id + fully qualified java class + package as slashified name
* /Classname2.java etc.
* a1/x/y/z/javapath/.../Classname.java if there are different root-ids
* ...
* v3/
* a0/x/y/z/javapath/.../Classname.java fully qualified java class + package as slashified name
* /Classname2.java etc.
* a1/x/y/z/javapath/.../Classname.java if there are different root-ids
* ...
*
* v3-classes - the compiled form if from classes and a java compiler was available
* The first directory is the id of the Jar or PEAR container.
* The second directory is the alternative.
*
* 23/a0/fully/slashified/package/class-name.class << parallel structure as v3/
*
* jars/ - copies of the original JARs with the converted JCas classes
* The first directory is the id of the Jar or PEAR container
* 7/jar-file-name-last-part.jar
* 12/jar-file-name-last-part.jar
* 14/ etc.
*
* pears - copies of the original PEARs with the converted JCas classes, if there were no duplicates
 *       8/pear-file-name-last-part.pear
* 9/ etc.
*
* not-converted/ (skipped)
* logs/
* jar-map.txt list of index to paths
* pear-map.txt list of index to paths
* processed.txt
* duplicates.txt
* builtinsNotExtended.txt
* failed.txt
* skippedBuiltins.txt
* nonJCasFiles.txt
 *       workaroundDir.txt
* deletedCheckModified.txt
* manualInspection.txt
* pearFileUpdates.txt
* jarFileUpdates.txt
* ...
* </pre>
*
* <p>Operates in one of two modes:
* <pre>
* Mode 1: Given classes-roots and/or individual class names, and a migrateClasspath,
 *           scans the classes-roots looking for class candidates
* - determines the class name,
* - decompiles that
* - migrates that decompiled source.
*
* if a Java compiler (JDK) is available,
* - compiles the results
* - does reassembly for Jars and PEARs, replacing the JCas classes.
*
* Mode 2: Given sources-roots or a single source java file
 *           scans the sources-roots looking for candidates
 *             - migrates that source.
* </pre>
*
* <p>Note: Each run clears the output directory before starting the migration.
*
* <p>Note: classpath may be specified using -migrateClassPath or as the class path used to run this tool.
*/
public class MigrateJCas extends VoidVisitorAdapter<Object> {
/* *****************************************************
* Internals
*
* Unique IDs of v2 and v3 artifacts:
* RootId + classname
*
* RootIdContainers (Set<RootId>) hold all discovered rootIds, at each Jar/Pear nesting level
* including outer level (no Jar/Pear).
* These are kept in a push-down stack
*
*
* Processing roots collection: done for source or class
* - iterate, for all roots
* -- processCollection for candidates rooted at that root
* --- candidate is .java or .class, with path, with pearClasspath string
* ---- migrate called on each candidate
* ----- check to see if already done, and if so, skip.
* ------ means: same byte or source code associated with same fqcn
*
* Root-ids: created for each unique pathpart in front of fully-qualified class name
* created for each unique path to Jar or PEAR
*
* Caching to speed up duplicate processing:
* - decompiling: if the byte[] is already done, use other value (if augmented migrateClasspath is the same)
* - source-migrating: if the source strings are the same.
*
* Multiple sources for single class:
* classname2multiSources: TreeMap from fqcn to CommonConverted (string or bytes)
* CommonConverted: supports multiple paths having identical string/bytes.
*
* Compiling: driven from c2ps array of fqcn, path
* - may have multiple entries for same fqcn, with different paths,
* -- only if different values for the impl
* - set when visiting top-level compilation unit non-built-in type
*
*/
  /** manage the indentation of printing routines */
private static final int[] indent = new int[1];
private static StringBuilder si(StringBuilder sb) { return Misc.indent(sb, indent); }
private static StringBuilder flush(StringBuilder sb) {
System.out.print(sb);
sb.setLength(0);
return sb;
}
private static final Integer INTEGER0 = 0;
private static int nextContainerId = 0;
/******************************************************************
* Container - exists in tree structure, has super, sub containers
* -- subcontainers: has path to it
* - holds set of rootIds in that container
* - topmost one has null parent, and null pathToJarOrPear
******************************************************************/
private static class Container implements Comparable<Container> {
final int id = nextContainerId++;
final Container parent; // null if at top level
/** root to scan from.
* Pears: is the loc in temp space of installed pear
* Jars: is the file system mounted on the Jar
* -- for inner Jars, the Jar is copied out into temp space. */
Path root;
final Path rootOrig; // for Jars and Pears, the original path ending in jar or pear
final Set<Container> subContainers = new TreeSet<>(); // tree set for better ordering
final List<Path> candidates = new ArrayList<>();
final List<CommonConverted> convertedItems = new ArrayList<>();
final List<V3CompiledPathAndContainerItemPath> v3CompiledPathAndContainerItemPath = new ArrayList<>();
final boolean isPear;
final boolean isJar;
final boolean isSingleJavaSource;
/** can't use Path as the type, because the equals for Path is object == */
final Set<String> _Types = new HashSet<>(); // has the non_Type path only if the _Type is found
boolean haveDifferentCapitalizedNamesCollidingOnWindows = false;
String pearClasspath; // not final - set by subroutine after defaulting
/** Cache of already done compiled classes, to avoid redoing them
* Kept by container, because the classpath could change the decompile */
private Map<byte[], CommonConverted> origBytesToCommonConverted = new HashMap<>();
Container(Container parent, Path root) {
this.parent = parent;
if (parent != null) {
parent.subContainers.add(this);
this.pearClasspath = parent.pearClasspath; // default, when expanding Jars.
}
this.rootOrig = root;
String s = root.toString().toLowerCase();
isJar = s.endsWith(".jar");
isPear = s.endsWith(".pear");
isSingleJavaSource = s.endsWith(".java");
this.root = (isPear || isJar)
? installJarOrPear()
: root;
// // debug
// if (!isPear && isJar) {
// System.out.println("debug prepare jar: " + this);
// }
}
    /**
     * Called when a new container is created for a Jar or PEAR.
     *
     * @return the install directory (the root of the file system mounted on the Jar or PEAR)
     */
private Path installJarOrPear() {
try {
Path theJarOrPear = rootOrig;
if (!theJarOrPear.getFileSystem().equals(FileSystems.getDefault())) {
          // the Jar or PEAR is embedded in another pear or jar, so copy it (intact) to a temp spot so it's no longer embedded
theJarOrPear = getTempOutputPathForJarOrPear(theJarOrPear);
Files.copy(rootOrig, theJarOrPear, StandardCopyOption.REPLACE_EXISTING);
}
if (isPear) {
// extract the pear just to get the classpath
File pearInstallDir = Files.createTempDirectory(getTempDir(), "installedPear").toFile();
PackageBrowser ip = PackageInstaller.installPackage(pearInstallDir, rootOrig.toFile(), false);
String newClasspath = ip.buildComponentClassPath();
String parentClasspath = parent.pearClasspath;
this.pearClasspath = (null == parentClasspath || 0 == parentClasspath.length())
? newClasspath
: newClasspath + File.pathSeparator + parentClasspath;
}
FileSystem pfs = FileSystems.newFileSystem(theJarOrPear, (ClassLoader) null);
return pfs.getPath("/");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = toString1();
indent[0] += 2;
try {
si(sb); // new line + indent
sb.append("subContainers=");
Misc.addElementsToStringBuilder(indent, sb, Misc.setAsList(subContainers), -1, (sbx, i) -> sbx.append(i.id)).append(',');
si(sb).append("paths migrated="); // new line + indent
Misc.addElementsToStringBuilder(indent, sb, candidates, -1, StringBuilder::append).append(',');
// si(sb).append("v3CompilePath="); // new line + indent
// Misc.addElementsToStringBuilder(indent, sb, v3CompiledPathAndContainerItemPath, 100, StringBuilder::append);
} finally {
indent[0] -=2;
si(sb).append(']');
}
return sb.toString();
}
public StringBuilder toString1() {
StringBuilder sb = new StringBuilder();
si(sb); // initial nl and indentation
sb.append(isJar ? "Jar " : isPear ? "PEAR " : "");
sb.append("container [id=").append(id)
.append(", parent.id=").append((null == parent) ? "null" : parent.id)
.append(", root or pathToJarOrPear=").append(rootOrig).append(',');
return sb;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return 31 * id;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Container other = (Container) obj;
if (id != other.id)
return false;
return true;
}
@Override
public int compareTo(Container o) {
return Integer.compare(id, o.id);
}
}
/**
* A path to a .java or .class file in some container, for the v2 version
* For Jars and Pears, the path is relative to the zip "/" dir
*/
private static class ContainerAndPath implements Comparable<ContainerAndPath> {
final Path path;
final Container container;
ContainerAndPath(Path path, Container container) {
this.path = path;
this.container = container;
}
/* (non-Javadoc)
* @see java.lang.Comparable#compareTo(java.lang.Object)
*/
@Override
public int compareTo(ContainerAndPath o) {
int r = path.compareTo(o.path);
if (r != 0) {
return r;
}
return Integer.compare(container.id, o.container.id);
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("ContainerAndPath [path=").append(path).append(", container=").append(container.id).append("]");
return sb.toString();
}
}
/**
* This class holds information used to replace compiled items in Jars and Pears.
*
   * a pair of the v3CompiledPath (which is the container nbr/a0/ + the package-class-name slash + ".class")
* and the Container origRoot up to the start of the package and class name
* for the item being compiled.
* - Note: if a Jar has the same compiled class at multiple nesting levels, each one will have
* an instance of this class
*/
private static class V3CompiledPathAndContainerItemPath {
final Path v3CompiledPath;
final String pathInContainer;
public V3CompiledPathAndContainerItemPath(Path v3CompiledPath, String pathInContainer) {
this.v3CompiledPath = v3CompiledPath;
this.pathInContainer = pathInContainer;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
si(sb).append("v3CompiledPathAndContainerPartPath [");
indent[0] += 2;
try {
si(sb).append("v3CompiledPath=").append(v3CompiledPath);
si(sb).append("pathInContainer=").append(pathInContainer);
} finally {
indent[0] -= 2;
si(sb).append("]");
}
return sb.toString();
}
}
private static final JavaCompiler javaCompiler = ToolProvider.getSystemJavaCompiler();
/****************************************************************
* Command line parameters
****************************************************************/
private static final String SOURCE_FILE_ROOTS = "-sourcesRoots";
private static final String CLASS_FILE_ROOTS = "-classesRoots";
private static final String OUTPUT_DIRECTORY = "-outputDirectory";
// private static final String SKIP_TYPE_CHECK = "-skipTypeCheck";
private static final String MIGRATE_CLASSPATH = "-migrateClasspath";
// private static final String CLASSES = "-classes"; // individual classes to migrate, get from supplied classpath
private static final Type intType = PrimitiveType.intType();
private static final Type callSiteType = JavaParser.parseType("CallSite");
private static final Type methodHandleType = JavaParser.parseType("MethodHandle");
private static final Type stringType = JavaParser.parseType("String");
private static final EnumSet<Modifier> public_static_final =
EnumSet.of(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL);
private static final EnumSet<Modifier> private_static_final =
EnumSet.of(Modifier.PRIVATE, Modifier.STATIC, Modifier.FINAL);
private static final PrettyPrinterConfiguration printWithoutComments =
new PrettyPrinterConfiguration();
static { printWithoutComments.setPrintComments(false); }
private static final PrettyPrinterConfiguration printCu =
new PrettyPrinterConfiguration();
static { printCu.setIndent(" "); }
private static final String ERROR_DECOMPILING = "!!! ERROR:";
static private boolean isSource = false;
static private Path tempDir = null;
/***************************************************************************************************/
private String packageName; // with dots?
private String className; // (omitting package)
private String packageAndClassNameSlash;
// next 3 set at start of migrate for item being migrated
private CommonConverted current_cc;
private Path current_path;
private Container current_container;
/** includes trailing / */
private String outputDirectory;
/** includes trailing / */
private String outDirConverted;
/** includes trailing / */
private String outDirSkipped;
/** includes trailing / */
private String outDirLog;
private Container[] sourcesRoots = null; // only one of these has 1 or more Container instances
private Container[] classesRoots = null;
private CompilationUnit cu;
// save this value in the class instance to avoid recomputing it
private ClassLoader cachedMigrateClassLoader = null;
private String migrateClasspath = null;
// private String individualClasses = null; // to decompile
/**
* CommonConverted next id, by fqcn
* key: fqcn_slashes value: next id
*/
private Map<String, Integer> nextCcId = new HashMap<>();
/**
* Common info about a particular source-code instance of a class
* Used to avoid duplicate work for the same JCas definition
* Used to track identical and non-identical duplicate defs
*
* When processing from sourcesRoots:
* use map: origSourceToCommonConverted key = source string
* if found, skip conversion, use previous converted result.
*
* When processing from classesRoots:
* use map: origBytesToCommonConverted key = byte[], kept by container in container
* if found, use previous converted results
*/
private class CommonConverted {
/**
* starts at 0, incr for each new instance for a particular fqcn_slash
* can't be assigned until fqcn known
*/
int id = -1; // temp value
final String v2Source; // remembered original source
final byte[] v2ByteCode; // remembered original bytes
/** all paths + their containers having the same converted result
* Need container because might change classpath for compiling
* - path is to v2 source or compiled class*/
final Set<ContainerAndPath> containersAndV2Paths = new HashSet<>();
String v3Source; // if converted, the result
/** converted/v3/id-of-cc/pkg/name/classname.java */
Path v3SourcePath; // path to converted source or null
String fqcn_slash; // full name of the class e.g. java/util/Foo. unknown for sources at first
CommonConverted(String origSource, byte[] v2ByteCode, Path path, Container container, String fqcn_slash) {
this.v2Source = origSource;
this.v2ByteCode = v2ByteCode;
containersAndV2Paths.add(new ContainerAndPath(path, container));
this.fqcn_slash = fqcn_slash;
}
/**
*
* @param container having this commonConverted instance
* @return the path to .java or .class file.
* If the container is a Jar or PEAR, it is the path within that Jar or Pear FileSystem
*/
Path getV2SourcePath(Container container) {
for (ContainerAndPath cp : containersAndV2Paths) {
if (cp.container == container) {
return cp.path;
}
}
throw new RuntimeException("internalError");
}
int getId() {
if (id < 0) {
Integer nextId = nextCcId.computeIfAbsent(fqcn_slash, s -> INTEGER0);
nextCcId.put(fqcn_slash, nextId + 1);
this.id = nextId;
}
return id;
}
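    // Illustrative note (not in the original source): ids are assigned per fully-qualified name, so
    // two non-identical definitions of org/example/MyType (a hypothetical name) get ids 0 and 1,
    // which become the "a0" and "a1" alternative directories in the converted/skipped output trees.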
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return v2Source == null ? 0 : v2Source.hashCode();
}
/* equal if the v2source is equal
*/
@Override
public boolean equals(Object obj) {
return obj instanceof CommonConverted &&
v2Source != null &&
v2Source.equals(((CommonConverted)obj).v2Source);
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
int maxLen = 10;
si(sb).append("CommonConverted [v2Source=").append(Misc.elide(v2Source, 100));
indent[0] += 2;
try {
si(sb).append("v2ByteCode=");
sb.append(v2ByteCode != null
? Arrays.toString(Arrays.copyOf(v2ByteCode, Math.min(v2ByteCode.length, maxLen))) :
"null").append(',');
si(sb).append("containersAndPaths=")
.append(containersAndV2Paths != null
? Misc.ppList(indent, Misc.setAsList(containersAndV2Paths), -1, StringBuilder::append)
: "null").append(',');
si(sb).append("v3SourcePath=").append(v3SourcePath).append(',');
si(sb).append("fqcn_slash=").append(fqcn_slash).append("]").append('\n');
} finally {
indent[0] -= 2;
}
return sb.toString();
}
}
/** Cache of already converted source classes, to avoid redoing them;
* - key is the actual source
* - value is CommonConverted
* This cache is over all containers
* */
private Map<String, CommonConverted> sourceToCommonConverted = new HashMap<>();
/**
* A map from fqcn_slash to a list of converted sources
* one per non-duplicated source
*/
private Map<String, List<CommonConverted>> classname2multiSources = new TreeMap<>();
/************************************
* Reporting
************************************/
// private final List<Path> v2JCasFiles = new ArrayList<>(); // unused
// private final List<Path> v3JCasFiles = new ArrayList<>(); // unused
private final List<PathContainerAndReason> nonJCasFiles = new ArrayList<>(); // path, reason
private final List<PathContainerAndReason> failedMigration = new ArrayList<>(); // path, reason
private final List<PathContainerAndReason> skippedBuiltins = new ArrayList<>(); // path, "built-in"
private final List<PathContainerAndReason> deletedCheckModified = new ArrayList<>(); // path, deleted check string
private final List<String1AndString2> pathWorkaround = new ArrayList<>(); // original, workaround
private final List<String1AndString2> pearClassReplace = new ArrayList<>(); // pear, classname
private final List<String1AndString2> jarClassReplace = new ArrayList<>(); // jar, classname
private final List<PathContainerAndReason> manualInspection = new ArrayList<>(); // path, reason
// private final List<PathAndPath> embeddedJars = new ArrayList<>(); // source, temp
private boolean isV2JCas; // false at start of migrate, set to true if a v2 class candidate is discovered
private boolean isConvert2v3; // true at start of migrate, set to false if conversion fails, left true if already a v3
private boolean isBuiltinJCas; // false at start of migrate, set to true if a built-in class is discovered
/************************************
* Context for visits
************************************/
/**
* if non-null, we're inside the ast for a likely JCas getter or setter method
*/
private MethodDeclaration get_set_method;
private String featName;
private boolean isGetter;
private boolean isArraySetter;
/**
* the range name part for _getXXXValue.. calls
*/
private Object rangeNamePart;
/**
* the range name part for _getXXXValue.. calls without converting Ref to Feature
*/
private String rangeNameV2Part;
/**
* temp place to insert static final int feature declarations
*/
private NodeList<BodyDeclaration<?>> fi_fields = new NodeList<>();
private Set<String> featNames = new HashSet<>();
private boolean hasV2Constructors;
private boolean hasV3Constructors;
private boolean error_decompiling = false;
private boolean badClassName;
private int itemCount;
/**
   * set if getAndProcessCandidatesInContainer encounters a class where it cannot do the compile
*/
private boolean unableToCompile;
final private StringBuilder psb = new StringBuilder();
public MigrateJCas() {
}
public static void main(String[] args) {
(new MigrateJCas()).run(args);
}
/***********************************
* Main
* @param args -
***********************************/
void run(String[] args) {
CommandLineParser clp = parseCommandArgs(args);
System.out.format("Output top directory: %s%n", outputDirectory);
// clear output dir
FileUtils.deleteRecursive(new File(outputDirectory));
isSource = sourcesRoots != null;
boolean isOk;
if (isSource) {
isOk = processRootsCollection("source", sourcesRoots, clp);
} else {
if (javaCompiler == null) {
System.out.println("The migration tool cannot compile the migrated files, \n"
+ " because no Java compiler is available.\n"
+ " To make one available, run this tool using a Java JDK, not JRE");
}
isOk = processRootsCollection("classes", classesRoots, clp);
}
// if (individualClasses != null) {
// processCollection("individual classes: ", new Iterator<String>() {
// Iterator<String> it = Arrays.asList(individualClasses.split(File.pathSeparator)).iterator();
// public boolean hasNext() {return it.hasNext();}
// public String next() {
// return prepareIndividual(it.next());}
// });
// }
if (error_decompiling) {
isOk = false;
}
isOk = report() && isOk;
System.out.println("Migration finished " +
(isOk
? "with no unusual conditions."
: "with 1 or more unusual conditions that need manual checking."));
}
/**
   * Called for compiled input, when a compiler is available and there is no name collision,
   *   if the container is a PEAR or a Jar.
   * Updates a copy of the PEAR or Jar.
   * @param container the PEAR or Jar container being updated
*/
private void postProcessPearOrJar(Container container) {
Path outDir = Paths.get(outputDirectory,
container.isJar ? "jars" : "pears",
Integer.toString(container.id));
withIOX(() -> Files.createDirectories(outDir));
si(psb).append("Replacing .class files in copy of ").append(container.rootOrig);
flush(psb);
try {
// copy the pear or jar so we don't change the original
Path lastPartOfPath = container.rootOrig.getFileName();
if (null == lastPartOfPath) throw new RuntimeException("Internal Error");
Path pearOrJarCopy = Paths.get(outputDirectory,
container.isJar ? "jars" : "pears",
Integer.toString(container.id),
lastPartOfPath.toString());
Files.copy(container.rootOrig, pearOrJarCopy);
// put up a file system on the pear or jar
FileSystem pfs = FileSystems.newFileSystem(pearOrJarCopy, (ClassLoader) null);
// replace the .class files in this PEAR or Jar with corresponding v3 ones
indent[0] += 2;
String[] previousSkip = {""};
container.v3CompiledPathAndContainerItemPath.forEach(c_p -> {
if (Files.exists(c_p.v3CompiledPath)) {
withIOX(() -> Files.copy(c_p.v3CompiledPath, pfs.getPath(c_p.pathInContainer), StandardCopyOption.REPLACE_EXISTING));
reportPearOrJarClassReplace(pearOrJarCopy.toString(), c_p.v3CompiledPath.toString(), container);
} else {
String pstr = c_p.v3CompiledPath.toString();
String pstr2 = pstr;
          if (!previousSkip[0].isEmpty()) {
int cmn = findFirstCharDifferent(previousSkip[0], pstr);
pstr2 = cmn > 5
? ("..." + pstr.substring(cmn))
: pstr;
}
previousSkip[0] = pstr;
si(psb).append("Skipping replacing ").append(pstr2)
.append(" because it could not be found, perhaps due to compile errors.");
flush(psb);
}
});
indent[0] -= 2;
// for (CommonConverted cc : container.convertedItems) {
// Map<Container, Path> v3ccs = cc.v3CompiledResultPaths;
// v3ccs.forEach((v3ccc, v3cc_path) ->
// {
// if (v3ccc == container) {
// String path_in_v3_classes = cc.v3CompiledResultPaths.get(container).toString();
//
// withIOX(() -> Files.copy(v3cc_path, pfs.getPath(path_in_v3_classes)));
// reportPearOrJarClassReplace(pearOrJarCopy.toString(), path_in_v3_classes, container);
// }
// });
// }
pfs.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Compile all the migrated JCas classes in this container, adjusting the classpath
* if the container is a Jar or Pear to include the Jar or PEAR.
*
* As a side effect, it saves in the container, a list of all the compiled things together
* with the path in container part, for use by a subsequent step to update copies of the jars/pears.
*
* The items in the container are broken into batches of multiple classes to be compiled together.
   *   - The grouping is by alternative number.  This ensures that multiple
* definitions of the same class are done separately (otherwise the compiler complains
* about multiple definitions).
*
   * As a side effect, compiling updates the container, adding all the compiled items
* to v3CompiledPathAndContainerItemPath
*
* @param container -
* @return true if compiled 1 or more sources, false if nothing was compiled
*/
private boolean compileV3SourcesCommon2(Container container) {
String classesBaseDir = outDirConverted + "v3-classes/" + container.id;
// specify the classpath. For PEARs use a class loader that loads first.
String classpath = getCompileClassPath(container);
// // debug
// String[] cpa = classpath.split(File.pathSeparator);
// System.out.println("debug - compilation classpath");
// int j = 0;
// for (String s : cpa) System.out.println("debug classpath: " + (++j) + " " + s);
// get a list of compilation unit path strings to the converted/v3/nnn/path
/**
* containerRoot is
* rootOrig or for Jars/Pears the Path to "/" in the zip file system
*/
Path containerRoot = null;
/**
* The Cu Path Strings for one container might have multiple instances of the class.
* These might be for identical or different sources.
* - This happens when a root has multiple paths to instances of the same class.
* - Multiple compiled-paths might be for the same classname
*
* For non-identical sources, the commonContainer instance "id" is spliced into the
* v3 migrated source path: see getBaseOutputPath, e.g. converted/2/a3/fqcn/slashed/name.java
*
* The compiler will complain if you feed it the same compilation unit classname twice, with
* different paths saying "duplicate class definition".
* - Fix: do compilation in batches, one for each different commonConverted id.
*/
Map<Integer, ArrayList<String>> cu_path_strings_by_ccId = new TreeMap<>(); // tree map to have nice order of keys
indent[0] += 2;
boolean isEmpty = true;
for (CommonConverted cc : container.convertedItems) {
if (cc.v3SourcePath == null) continue; // skip items that failed migration
isEmpty = false;
// relativePathInContainer = the whole path with the first part (up to the end of the container root) stripped off
/**
* itemPath is the original path in the container to where the source or class file is
* For Jars and PEARs, it is relative to the Jar or PEAR
*/
Path itemPath = cc.getV2SourcePath(container);
if (null == containerRoot) {
// lazy setup on first call
// for Pears, must use the == filesystem, otherwise get
// ProviderMismatchException
containerRoot = (container.isJar || container.isPear)
? itemPath.getFileSystem().getPath("/")
: container.rootOrig;
}
/**
* relativePathInContainer might be x/y/z/a/b/c/name.class
* (ends in .class because we only get here when the input is class files)
*/
String relativePathInContainer = containerRoot.relativize(itemPath).toString();
container.v3CompiledPathAndContainerItemPath.add(
new V3CompiledPathAndContainerItemPath(
Paths.get(classesBaseDir, "a" + cc.id, cc.fqcn_slash + ".class" /*relativePathInContainer*/),
relativePathInContainer));
ArrayList<String> items = cu_path_strings_by_ccId.computeIfAbsent(cc.id, x -> new ArrayList<>());
items.add(cc.v3SourcePath.toString());
}
if (isEmpty) {
si(psb).append("Skipping compiling for container ").append(container.id).append(" ").append(container.rootOrig);
si(psb).append(" because non of the v2 classes were migrated (might have been built-ins)");
flush(psb);
return false;
}
else {
si(psb).append("Compiling for container ").append(container.id).append(" ").append(container.rootOrig);
flush(psb);
}
// List<String> cu_path_strings = container.convertedItems.stream()
// .filter(cc -> cc.v3SourcePath != null)
// .peek(cc -> container.v3CompiledPathAndContainerItemPath.add(
// new V3CompiledPathAndContainerItemPath(
// Paths.get(classesBaseDir, cc.v3SourcePath.toString()),
// getPathInContainer(container, cc).toString())))
// .map(cc -> cc.v3SourcePath.toString())
// .collect(Collectors.toList());
boolean resultOk = true;
for (int ccId = 0;; ccId++) { // do each version of classes separately
List<String> cups = cu_path_strings_by_ccId.get(ccId);
if (cups == null) {
break;
}
StandardJavaFileManager fileManager = javaCompiler.getStandardFileManager(null, null, Charset.forName("UTF-8"));
Iterable<? extends JavaFileObject> compilationUnits =
fileManager.getJavaFileObjectsFromStrings(cups);
// //debug
// System.out.println("Debug: list of compilation unit strings for iteration " + i);
// int[] k = new int[] {0};
// cups.forEach(s -> System.out.println(Integer.toString(++(k[0])) + " " + s));
// System.out.println("debug end");
String classesBaseDirN = classesBaseDir + "/a" + ccId;
withIOX(() -> Files.createDirectories(Paths.get(classesBaseDirN)));
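      // compiler options: -d sends the generated .class files to this batch's output dir;
      // the classpath was built above by getCompileClassPath (tool classpath, enclosing PEARs/Jars, migrateClasspath)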
Iterable<String> options = Arrays.asList("-d", classesBaseDirN, "-classpath", classpath);
si(psb).append("Compiling for commonConverted version ").append(ccId)
.append(", ").append(cups.size()).append(" classes");
flush(psb);
DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
/*********** Compile ***********/
resultOk = javaCompiler.getTask(null, fileManager, diagnostics, options, null, compilationUnits).call() && resultOk;
/********************************/
indent[0] += 2;
for (Diagnostic<? extends JavaFileObject> diagnostic : diagnostics.getDiagnostics()) {
JavaFileObject s = diagnostic.getSource();
si(psb).append(diagnostic.getKind());
int lineno = (int) diagnostic.getLineNumber();
if (lineno != Diagnostic.NOPOS) {
psb.append(" on line ").append(diagnostic.getLineNumber());
}
int pos = (int) diagnostic.getPosition();
if (pos != Diagnostic.NOPOS) {
psb.append(", position: ").append(diagnostic.getColumnNumber());
}
if (s != null) {
psb.append(" in ").append(s.toUri());
}
si(psb).append(" ").append(diagnostic.getMessage(null));
flush(psb);
}
withIOX( () -> fileManager.close());
indent[0] -= 2;
si(psb).append("Compilation finished").append(
resultOk ? " with no errors." : "with some errors.");
flush(psb);
}
indent[0] -= 2;
unableToCompile = !resultOk;
return true;
}
/**
* The classpath used to compile is (in precedence order)
* - the classpath for this migration app (first in order to pick up v3 support, overriding others)
* - any Pears, going up the parent chain, closest ones first
* - any Jars, going up the parent chain, closest ones last
* - passed in migrate classpath
* @return the classpath to use in compiling the jcasgen'd sources
*/
private String getCompileClassPath(Container container) {
// start with this (the v3migration tool) app's classpath to a cp string
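    // note: this cast assumes the system class loader is a URLClassLoader (true on Java 8; not on later Java versions)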
URLClassLoader systemClassLoader = (URLClassLoader) ClassLoader.getSystemClassLoader();
URL[] urls = systemClassLoader.getURLs();
StringBuilder cp = new StringBuilder();
boolean firstTime = true;
for (URL url : urls) {
if (! firstTime) {
cp.append(File.pathSeparatorChar);
} else {
firstTime = false;
}
cp.append(url.getPath());
}
    // add the PEARs, going up the parent chain, closest first
Container c = container;
while (c != null) {
if (c.isPear) {
cp.append(File.pathSeparator).append(c.pearClasspath);
}
c = c.parent;
}
// add the migrateClasspath, expanded
if (null != migrateClasspath) {
cp.append(File.pathSeparator).append(Misc.expandClasspath(migrateClasspath));
}
// add the Jars, closest last
c = container;
List<String> ss = new ArrayList<>();
while (c != null) {
if (c.isJar) {
ss.add(c.root.toString());
}
c = c.parent;
}
Collections.reverse(ss);
ss.forEach(s -> cp.append(File.pathSeparator).append(s));
// System.out.println("debug: compile classpath = " + cp.toString());
return cp.toString();
}
/**
* iterate to process collections from all roots
* Called once, to process either sources or classes
* @return false if unable to compile, true otherwise
*/
private boolean processRootsCollection(String kind, Container[] roots, CommandLineParser clp) {
unableToCompile = false; // preinit
psb.setLength(0);
indent[0] = 0;
itemCount = 1;
for (Container rootContainer : roots) {
showWorkStart(rootContainer);
// adds candidates to root containers, and adds sub containers for Jars and Pears
getAndProcessCandidatesInContainer(rootContainer);
// for (Path path : rootContainer.candidates) {
//
// CommonConverted cc = getSource(path, rootContainer);
// migrate(cc, rootContainer, path);
//
// if ((i % 50) == 0) System.out.format("%4d%n ", Integer.valueOf(i));
// i++;
// }
}
si(psb).append("Total number of candidates processed: ").append(itemCount - 1);
flush(psb);
indent[0] = 0;
return !unableToCompile;
}
private void showWorkStart(Container rootContainer) {
si(psb).append("Migrating " + rootContainer.rootOrig.toString());
indent[0] += 2;
si(psb).append("Each character is one class");
si(psb).append(" . means normal class");
si(psb).append(" b means built in");
si(psb).append(" i means identical duplicate");
si(psb).append(" d means non-identical definition for the same JCas class");
si(psb).append(" nnn at the end of the line is the number of classes migrated\n");
flush(psb);
}
/**
* parse command line args
* @param args -
* @return the CommandLineParser instance
*/
private CommandLineParser parseCommandArgs(String[] args) {
CommandLineParser clp = createCmdLineParser();
try {
clp.parseCmdLine(args);
} catch (Exception e) {
throw new RuntimeException(e);
}
if (!checkCmdLineSyntax(clp)) {
printUsage();
System.exit(2);
}
if (clp.isInArgsList(CLASS_FILE_ROOTS)) {
classesRoots = getRoots(clp, CLASS_FILE_ROOTS);
}
if (clp.isInArgsList(SOURCE_FILE_ROOTS)) {
sourcesRoots = getRoots(clp, SOURCE_FILE_ROOTS);
}
return clp;
}
private Container[] getRoots(CommandLineParser clp, String kind) {
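    // each path-separator-delimited entry becomes a top-level Container (no parent)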
String[] paths = clp.getParamArgument(kind).split("\\" + File.pathSeparator);
Container[] cs = new Container[paths.length];
int i = 0;
for (String path : paths) {
cs[i++] = new Container(null, Paths.get(path));
}
return cs;
}
/**
* @param p the path to the compiled or non-compiled source
* @param container the container
* @return the instance of the CommonConverted object,
* and update the container's convertedItems list if needed to include it
*/
private CommonConverted getSource(Path p, Container container) {
try {
byte[] localV2ByteCode = null;
CommonConverted cc;
String v2Source;
if (!isSource) {
localV2ByteCode = Files.readAllBytes(p);
// only use prev decompiled if same container
cc = container.origBytesToCommonConverted.get(localV2ByteCode);
if (null != cc) {
return cc;
}
// decompile side effect: sets fqcn
try {
v2Source = decompile(localV2ByteCode, container.pearClasspath);
} catch (RuntimeException e) {
badClassName = true;
e.printStackTrace();
v2Source = null;
}
if (badClassName) {
System.err.println("Candidate with bad Class Name is: " + p.toString());
return null;
}
final byte[] finalbc = localV2ByteCode;
cc = sourceToCommonConverted.computeIfAbsent(v2Source,
src -> new CommonConverted(src, finalbc, p, container, packageAndClassNameSlash));
// cc = new CommonConverted(v2Source, localV2ByteCode, p, container, packageAndClassNameSlash);
container.origBytesToCommonConverted.put(localV2ByteCode, cc);
} else {
v2Source = FileUtils.reader2String(Files.newBufferedReader(p));
cc = sourceToCommonConverted.get(v2Source);
if (null == cc) {
cc = new CommonConverted(v2Source, null, p, container, "unknown");
sourceToCommonConverted.put(v2Source, cc);
} else {
// add this new path + container to set of pathsAndContainers kept by this CommonConverted object
cc.containersAndV2Paths.add(new ContainerAndPath(p, container));
}
}
//Containers have list of CommonConverted, which, in turn
// have Set of ContainerAndPath elements.
// (the same JCas class might appear in two different paths in a container)
if (!container.convertedItems.contains(cc)) {
container.convertedItems.add(cc);
}
return cc;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
   * Migrate one JCas definition; writes 1 char to System.out to indicate progress.
*
* The source is either direct, or a decompiled version of a .class file (missing comments, etc.).
*
   * This method is only called if heuristics indicate this is a V2 JCas class definition.
*
* Skips the migration if already done.
* Skips if decompiling, and it failed.
*
* The goal is to preserve as much as possible of existing customization.
* The general approach is to parse the source into an AST, and use visitor methods.
   * For getter/setter methods that are for features (heuristic), set up a context for inner visitors
* identifying the getter / setter.
* - reuse method declarator, return value casts, value expressions
* - remove feature checking statement, array bounds checking statement, if present.
* - replace the simpleCore (see Jg), replace the arrayCore
*
* For constructors, replace the 2-arg one that has arguments:
   *   addr and TOP_Type with the v3 one using TypeImpl, CASImpl.
*
* Add needed imports.
* Add for each feature the _FI_xxx static field declarator.
*
* Leave other top level things alone
* - additional constructors.
* - other methods not using jcasType refs
*
* @param source - the source, either directly from a .java file, or a decompiled .class file
*/
private void migrate(CommonConverted cc, Container container, Path path) {
if (null == cc) {
System.err.println("Skipping this component due to decompile failure: " + path.toString());
System.err.println(" in container: " + container);
isConvert2v3 = false;
error_decompiling = true;
return;
}
if (cc.v3Source != null) {
// next updates classname2multiSources for tracking non-identical defs
boolean identical = collectInfoForReports(cc);
assert identical;
psb.append("i");
flush(psb);
cc.containersAndV2Paths.add(new ContainerAndPath(path, container));
return;
}
assert cc.v2Source != null;
packageName = null;
className = null;
packageAndClassNameSlash = null;
cu = null;
String source = cc.v2Source;
isConvert2v3 = true; // preinit, set false if convert fails
isV2JCas = false; // preinit, set true by reportV2Class, called by visit to ClassOrInterfaceDeclaration,
                             //   when it has v2 constructors, and the right type and typeIndexID field declarations
isBuiltinJCas = false;
featNames.clear();
fi_fields.clear();
try { // to reset the next 3 items
current_cc = cc;
current_container = container;
current_path = path;
// System.out.println("Migrating source before migration:\n");
// System.out.println(source);
// System.out.println("\n\n\n");
if (source.startsWith(ERROR_DECOMPILING)) {
System.err.println("Decompiling failed for class: " + cc.toString() + "\n got: " + Misc.elide(source, 300, false));
System.err.println("Please check the migrateClasspath");
if (null == migrateClasspath) {
System.err.println("classpath of this app is");
System.err.println(System.getProperty("java.class.path"));
} else {
System.err.println(" first part of migrateClasspath argument was: " + Misc.elide(migrateClasspath, 300, false));
System.err.println(" Value used was:");
URL[] urls = Misc.classpath2urls(migrateClasspath);
for (URL url : urls) {
System.err.println(" " + url.toString());
}
}
System.err.println("Skipping this component");
isConvert2v3 = false;
error_decompiling = true;
return;
}
StringReader sr = new StringReader(source);
try {
cu = JavaParser.parse(sr);
addImport("java.lang.invoke.CallSite");
addImport("java.lang.invoke.MethodHandle");
addImport("org.apache.uima.cas.impl.CASImpl");
addImport("org.apache.uima.cas.impl.TypeImpl");
addImport("org.apache.uima.cas.impl.TypeSystemImpl");
this.visit(cu, null); // side effect: sets the className, packageAndClassNameSlash, packageName
new removeEmptyStmts().visit(cu, null);
if (isConvert2v3) {
removeImport("org.apache.uima.jcas.cas.TOP_Type");
}
if (isConvert2v3 && fi_fields.size() > 0) {
NodeList<BodyDeclaration<?>> classMembers = cu.getTypes().get(0).getMembers();
int positionOfFirstConstructor = findConstructor(classMembers);
if (positionOfFirstConstructor < 0) {
throw new RuntimeException();
}
classMembers.addAll(positionOfFirstConstructor, fi_fields);
}
ImportDeclaration firstImport = cu.getImports().get(0);
String transformedMessage = String.format(" Migrated by uimaj-v3-migration-jcas, %s%n" +
" Container: %s%n" +
" Path in container: %s%n",
new Date(),
container.toString1(),
path.toString()).replace('\\','/');
Optional<Comment> existingComment = firstImport.getComment();
if (existingComment.isPresent()) {
Comment comment = existingComment.get();
comment.setContent(comment.getContent() + "\n" + transformedMessage);
} else {
firstImport.setBlockComment(transformedMessage);
}
if (isSource) {
sourceToCommonConverted.put(source, cc);
}
boolean identicalFound = collectInfoForReports(cc);
assert ! identicalFound;
if (isV2JCas) {
writeV2Orig(cc, isConvert2v3);
}
if (isConvert2v3) {
cc.v3Source = new PrettyPrinter(printCu).print(cu);
writeV3(cc);
}
psb.append(isBuiltinJCas
? "b"
: (classname2multiSources.get(cc.fqcn_slash).size() == 1)
? "."
: "d"); // means non-identical duplicate
flush(psb);
} catch (IOException e) {
e.printStackTrace();
throw new UIMARuntimeException(e);
} catch (Exception e) {
System.out.println("debug: exception caught, source was\n" + source);
throw new UIMARuntimeException(e);
}
} finally {
current_cc = null;
current_container = null;
current_path = null;
}
}
/**
* Called when have already converted this exact source or
* when we just finished converting this source.
* Add this instance to the tracking information for multiple versions (identical or not) of a class
* @return true if this is an identical duplicate of one already done
*/
private boolean collectInfoForReports(CommonConverted cc) {
String fqcn_slash = cc.fqcn_slash;
// track, by fqcn, all duplicates (identical or not)
// // for a given fully qualified class name (slashified),
// // find the list of CommonConverteds - one per each different version
// // create it if null
List<CommonConverted> commonConverteds = classname2multiSources
.computeIfAbsent(fqcn_slash, k -> new ArrayList<>());
// search to see if this instance already in the set
// if so, add the path to the set of identicals
// For class sources case, we compare the decompiled version
boolean found = commonConverteds.contains(cc);
if (!found) {
commonConverteds.add(cc);
}
return found;
}
/******************
* Visitors
******************/
/**
* Capture the type name from all top-level types
* AnnotationDeclaration, Empty, and Enum
*/
@Override
public void visit(AnnotationDeclaration n, Object ignore) {
updateClassName(n);
super.visit(n, ignore);
}
// @Override
// public void visit(EmptyTypeDeclaration n, Object ignore) {
// updateClassName(n);
// super.visit(n, ignore);
// }
@Override
public void visit(EnumDeclaration n, Object ignore) {
updateClassName(n);
super.visit(n, ignore);
}
/**
* Check if the top level class looks like a JCas class, and report if not:
* has 0, 1, and 2 element constructors
* has static final field defs for type and typeIndexID
*
* Also check if V2 style: 2 arg constructor arg types
* Report if looks like V3 style due to args of 2 arg constructor
*
* if class doesn't extend anything, not a JCas class.
* if class is enum, not a JCas class
* @param n -
* @param ignore -
*/
@Override
public void visit(ClassOrInterfaceDeclaration n, Object ignore) {
// do checks to see if this is a JCas class; if not report skipped
Optional<Node> maybeParent = n.getParentNode();
if (maybeParent.isPresent()) {
Node parent = maybeParent.get();
if (parent instanceof CompilationUnit) {
updateClassName(n);
if (isBuiltinJCas) {
// is a built-in class, skip it
super.visit(n, ignore);
return;
}
NodeList<ClassOrInterfaceType> supers = n.getExtendedTypes();
if (supers == null || supers.size() == 0) {
reportNotJCasClass("class doesn't extend a superclass");
super.visit(n, ignore);
return;
}
NodeList<BodyDeclaration<?>> members = n.getMembers();
setHasJCasConstructors(members);
if (hasV2Constructors && hasTypeFields(members)) {
reportV2Class();
super.visit(n, ignore);
return;
}
if (hasV2Constructors) {
reportNotJCasClassMissingTypeFields();
return;
}
if (hasV3Constructors) {
reportV3Class();
return;
}
reportNotJCasClass("missing v2 constructors");
return;
}
}
super.visit(n, ignore);
return;
}
@Override
public void visit(PackageDeclaration n, Object ignored) {
packageName = n.getNameAsString();
super.visit(n, ignored);
}
/***************
* Constructors
* - modify the 2 arg constructor - changing the args and the body
* @param n - the constructor node
* @param ignored -
*/
@Override
public void visit(ConstructorDeclaration n, Object ignored) {
super.visit(n, ignored); // processes the params
if (!isConvert2v3) { // for enums, annotations
return;
}
List<Parameter> ps = n.getParameters();
if (ps.size() == 2 &&
getParmTypeName(ps, 0).equals("int") &&
getParmTypeName(ps, 1).equals("TOP_Type")) {
/** public Foo(TypeImpl type, CASImpl casImpl) {
* super(type, casImpl);
* readObject();
*/
setParameter(ps, 0, "TypeImpl", "type");
setParameter(ps, 1, "CASImpl", "casImpl");
// Body: change the 1st statement (must be super)
NodeList<Statement> stmts = n.getBody().getStatements();
if (!(stmts.get(0) instanceof ExplicitConstructorInvocationStmt)) {
recordBadConstructor("missing super call");
return;
}
NodeList<Expression> args = ((ExplicitConstructorInvocationStmt)(stmts.get(0))).getArguments();
args.set(0, new NameExpr("type"));
args.set(1, new NameExpr("casImpl"));
// leave the rest unchanged.
}
}
private final static Pattern refGetter = Pattern.compile("(ll_getRef(Array)?Value)|"
+ "(ll_getFSForRef)");
private final static Pattern word1 = Pattern.compile("\\A(\\w*)"); // word chars starting at beginning \\A means beginning
/*****************************
* Method Declaration Visitor
* Heuristic to determine if a feature getter or setter:
* - name: is 4 or more chars, starting with get or set, with 4th char uppercase
* is not "getTypeIndexID"
* - (optional - if comments are available:)
* getter for xxx, setter for xxx
* - for getter: has 0 or 1 arg (1 arg case for indexed getter, arg must be int type)
* - for setter: has 1 or 2 args
*
* Workaround for decompiler - getters which return FSs might be missing the cast to the return value type
*
*****************************/
@Override
public void visit(MethodDeclaration n, Object ignore) {
String name = n.getNameAsString();
isGetter = isArraySetter = false;
do { // to provide break exit
if (name.length() >= 4 &&
((isGetter = name.startsWith("get")) || name.startsWith("set")) &&
Character.isUpperCase(name.charAt(3)) &&
!name.equals("getTypeIndexID")) {
List<Parameter> ps = n.getParameters();
if (isGetter) {
if (ps.size() > 1) break;
} else { // is setter
if (ps.size() > 2 ||
ps.size() == 0) break;
if (ps.size() == 2) {
if (!getParmTypeName(ps, 0).equals("int")) break;
isArraySetter = true;
}
}
// get the range-part-name and convert to v3 range ("Ref" changes to "Feature")
String bodyString = n.getBody().get().toString(printWithoutComments);
int i = bodyString.indexOf("jcasType.ll_cas.ll_");
if (i < 0) break;
String s = bodyString.substring(i + "jcasType.ll_cas.ll_get".length()); // also for ...ll_set - same length!
if (s.startsWith("FSForRef(")) { // then it's the wrapper and the wrong instance.
i = s.indexOf("jcasType.ll_cas.ll_");
if (i < 0) {
reportUnrecognizedV2Code("Found \"jcasType.ll_cas.ll_[set or get]...FSForRef(\" but didn't find following \"jcasType.ll_cas_ll_\"\n" + n.toString());
break;
}
s = s.substring(i + "jcasType.ll_cas.ll_get".length());
}
i = s.indexOf("Value");
if (i < 0) {
reportUnrecognizedV2Code("Found \"jcasType.ll_cas.ll_[set or get]\" but didn't find following \"Value\"\n" + n.toString());
break; // give up
}
s = Character.toUpperCase(s.charAt(0)) + s.substring(1, i);
rangeNameV2Part = s;
rangeNamePart = s.equals("Ref") ? "Feature" : s;
// get feat name following ")jcasType).casFeatCode_xxxxx,
i = bodyString.indexOf("jcasType).casFeatCode_");
if (i == -1) {
reportUnrecognizedV2Code("Didn't find \"...jcasType).casFeatCode_\"\n" + n.toString());
break;
}
Matcher m = word1.matcher(bodyString.substring(i + "jcasType).casFeatCode_".length() ));
if (!m.find()) {
reportUnrecognizedV2Code("Found \"...jcasType).casFeatCode_\" but didn't find subsequent word\n" + n.toString());
break;
}
featName = m.group(1);
String fromMethod = Character.toLowerCase(name.charAt(3)) + name.substring(4);
if (!featName.equals(fromMethod)) {
            // don't report if the only difference is the first letter capitalization
if (!(Character.toLowerCase(featName.charAt(0)) + featName.substring(1)).equals(fromMethod)) {
reportMismatchedFeatureName(String.format("%-25s %s", featName, name));
}
}
// add _FI_xxx = TypeSystemImpl.getAdjustedFeatureOffset("xxx");
// replaced Sept 2017
// NodeList<Expression> args = new NodeList<>();
// args.add(new StringLiteralExpr(featName));
// VariableDeclarator vd = new VariableDeclarator(
// intType,
// "_FI_" + featName,
// new MethodCallExpr(new NameExpr("TypeSystemImpl"), new SimpleName("getAdjustedFeatureOffset"), args));
// if (featNames.add(featName)) { // returns true if it was added, false if already in the set of featNames
// fi_fields.add(new FieldDeclaration(public_static_final, vd));
// }
// add _FC_xxx = TypeSystemImpl.createCallSite(ccc.class, "xxx");
// add _FH_xxx = _FC_xxx.dynamicInvoker();
// add _FeatName_xxx = "xxx" // https://issues.apache.org/jira/browse/UIMA-5575
if (featNames.add(featName)) { // returns true if it was added, false if already in the set of featNames
// _FC_xxx = TypeSystemImpl.createCallSite(ccc.class, "xxx");
MethodCallExpr initCallSite = new MethodCallExpr(new NameExpr("TypeSystemImpl"), "createCallSite");
initCallSite.addArgument(new FieldAccessExpr(new NameExpr(className), "class"));
initCallSite.addArgument(new StringLiteralExpr(featName));
VariableDeclarator vd_FC = new VariableDeclarator(callSiteType, "_FC_" + featName, initCallSite);
fi_fields.add(new FieldDeclaration(private_static_final, vd_FC));
// _FH_xxx = _FC_xxx.dynamicInvoker();
MethodCallExpr initInvoker = new MethodCallExpr(new NameExpr(vd_FC.getName()), "dynamicInvoker");
VariableDeclarator vd_FH = new VariableDeclarator(methodHandleType, "_FH_" + featName, initInvoker);
fi_fields.add(new FieldDeclaration(private_static_final, vd_FH));
// _FeatName_xxx = "xxx" // https://issues.apache.org/jira/browse/UIMA-5575
VariableDeclarator vd_fn = new VariableDeclarator(stringType, "_FeatName_" + featName, new StringLiteralExpr(featName));
fi_fields.add(new FieldDeclaration(public_static_final, vd_fn));
}
/**
* add missing cast stmt for
* return stmts where the value being returned:
* - doesn't have a cast already
* - has the expression be a methodCallExpr with a name which looks like:
* ll_getRefValue or
* ll_getRefArrayValue
*/
if (isGetter && "Feature".equals(rangeNamePart)) {
for (Statement stmt : n.getBody().get().getStatements()) {
if (stmt instanceof ReturnStmt) {
Expression e = getUnenclosedExpr(((ReturnStmt)stmt).getExpression().get());
if ((e instanceof MethodCallExpr)) {
String methodName = ((MethodCallExpr)e).getNameAsString();
if (refGetter.matcher(methodName).matches()) { // ll_getRefValue or ll_getRefArrayValue
addCastExpr(stmt, n.getType());
}
}
}
}
}
get_set_method = n; // used as a flag during inner "visits" to signal
// we're inside a likely feature setter/getter
} // end of test for getter or setter method
} while (false); // do once, provide break exit
super.visit(n, ignore);
get_set_method = null; // after visiting, reset the get_set_method to null
}
/**
* Visitor for if stmts
* - removes feature missing test
*/
@Override
public void visit(IfStmt n, Object ignore) {
do {
// if (get_set_method == null) break; // sometimes, these occur outside of recogn. getters/setters
Expression c = n.getCondition(), e;
BinaryExpr be, be2;
List<Statement> stmts;
if ((c instanceof BinaryExpr) &&
((be = (BinaryExpr)c).getLeft() instanceof FieldAccessExpr) &&
((FieldAccessExpr)be.getLeft()).getNameAsString().equals("featOkTst")) {
// remove the feature missing if statement
// verify the remaining form
if (! (be.getRight() instanceof BinaryExpr)
|| ! ((be2 = (BinaryExpr)be.getRight()).getRight() instanceof NullLiteralExpr)
|| ! (be2.getLeft() instanceof FieldAccessExpr)
|| ! ((e = getExpressionFromStmt(n.getThenStmt())) instanceof MethodCallExpr)
|| ! (((MethodCallExpr)e).getNameAsString()).equals("throwFeatMissing")) {
reportDeletedCheckModified("The featOkTst was modified:\n" + n.toString() + '\n');
}
BlockStmt parent = (BlockStmt) n.getParentNode().get();
stmts = parent.getStatements();
        stmts.set(stmts.indexOf(n), new EmptyStmt()); // don't remove
// otherwise iterators fail
// parent.getStmts().remove(n);
return;
}
} while (false);
super.visit(n, ignore);
}
/**
* visitor for method calls
*/
@Override
public void visit(MethodCallExpr n, Object ignore) {
Optional<Node> p1, p2, p3 = null;
Node updatedNode = null;
NodeList<Expression> args;
do {
if (get_set_method == null) break;
/** remove checkArraybounds statement **/
if (n.getNameAsString().equals("checkArrayBounds") &&
((p1 = n.getParentNode()).isPresent() && p1.get() instanceof ExpressionStmt) &&
((p2 = p1.get().getParentNode()).isPresent() && p2.get() instanceof BlockStmt) &&
((p3 = p2.get().getParentNode()).isPresent() && p3.get() == get_set_method)) {
NodeList<Statement> stmts = ((BlockStmt)p2.get()).getStatements();
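        // replace with an empty statement rather than removing, so active iterators stay valid;
        // the empty statements are stripped later by removeEmptyStmts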
stmts.set(stmts.indexOf(p1.get()), new EmptyStmt());
return;
}
// convert simpleCore expression ll_get/setRangeValue
boolean useGetter = isGetter || isArraySetter;
if (n.getNameAsString().startsWith("ll_" + (useGetter ? "get" : "set") + rangeNameV2Part + "Value")) {
args = n.getArguments();
if (args.size() != (useGetter ? 2 : 3)) break;
String suffix = useGetter ? "Nc" : rangeNamePart.equals("Feature") ? "NcWj" : "Nfc";
String methodName = "_" + (useGetter ? "get" : "set") + rangeNamePart + "Value" + suffix;
args.remove(0); // remove the old addr arg
// arg 0 converted when visiting args FieldAccessExpr
n.setScope(null);
n.setName(methodName);
}
// convert array sets/gets
String z = "ll_" + (isGetter ? "get" : "set");
String nname = n.getNameAsString();
if (nname.startsWith(z) &&
nname.endsWith("ArrayValue")) {
String s = nname.substring(z.length());
s = s.substring(0, s.length() - "Value".length()); // s = "ShortArray", etc.
if (s.equals("RefArray")) s = "FSArray";
if (s.equals("IntArray")) s = "IntegerArray";
EnclosedExpr ee = new EnclosedExpr(
new CastExpr(new ClassOrInterfaceType(s), n.getArguments().get(0)));
n.setScope(ee); // the getter for the array fs
n.setName(isGetter ? "get" : "set");
n.getArguments().remove(0);
}
/** remove ll_getFSForRef **/
/** remove ll_getFSRef **/
if (n.getNameAsString().equals("ll_getFSForRef") ||
n.getNameAsString().equals("ll_getFSRef")) {
updatedNode = replaceInParent(n, n.getArguments().get(0));
}
} while (false);
if (updatedNode != null) {
updatedNode.accept(this, null);
} else {
super.visit(n, null);
}
}
/**
* visitor for field access expressions
* - convert ((...type_Type)jcasType).casFeatCode_XXXX to _FI_xxx
* @param n -
* @param ignore -
*/
@Override
public void visit(FieldAccessExpr n, Object ignore) {
Expression e;
Optional<Expression> oe;
String nname = n.getNameAsString();
if (get_set_method != null) {
if (nname.startsWith("casFeatCode_") &&
((oe = n.getScope()).isPresent()) &&
((e = getUnenclosedExpr(oe.get())) instanceof CastExpr) &&
("jcasType".equals(getName(((CastExpr)e).getExpression())))) {
String featureName = nname.substring("casFeatCode_".length());
// replaceInParent(n, new NameExpr("_FI_" + featureName)); // repl last in List<Expression> (args)
MethodCallExpr getint = new MethodCallExpr(null, "wrapGetIntCatchException");
getint.addArgument(new NameExpr("_FH_" + featureName));
replaceInParent(n, getint);
return;
} else if (nname.startsWith("casFeatCode_")) {
reportMigrateFailed("Found field casFeatCode_ ... without a previous cast expr using jcasType");
}
}
super.visit(n, ignore);
}
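  /** strips the EmptyStmt placeholders that the visitors above substituted for deleted statements */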
private class removeEmptyStmts extends VoidVisitorAdapter<Object> {
@Override
public void visit(BlockStmt n, Object ignore) {
n.getStatements().removeIf(statement -> statement instanceof EmptyStmt);
super.visit(n, ignore);
}
// @Override
// public void visit(MethodDeclaration n, Object ignore) {
// if (n.getNameAsString().equals("getModifiablePrimitiveNodes")) {
// System.out.println("debug");
// }
// super.visit(n, ignore);
// if (n.getNameAsString().equals("getModifiablePrimitiveNodes")) {
// System.out.println("debug");
// }
// }
}
/**
* converted files:
* java name, path (sorted by java name, v3 name only)
* not-converted:
* java name, path (sorted by java name)
* duplicates:
* java name, path (sorted by java name)
* @return true if it's likely everything converted OK.
*/
private boolean report() {
System.out.println("\n\nMigration Summary");
System.out.format("Output top directory: %s%n", outputDirectory);
System.out.format("Date/time: %tc%n", new Date());
pprintRoots("Sources", sourcesRoots);
pprintRoots("Classes", classesRoots);
boolean isOk2 = true;
try {
// these reports, if non-empty items, imply something needs manual checking, so reset isOk2
isOk2 = reportPaths("Workaround Directories", "workaroundDir.txt", pathWorkaround) && isOk2;
isOk2 = reportPaths("Reports of converted files where a deleted check was customized", "deletedCheckModified.txt", deletedCheckModified) && isOk2;
isOk2 = reportPaths("Reports of converted files needing manual inspection", "manualInspection.txt", manualInspection) && isOk2;
isOk2 = reportPaths("Reports of files which failed migration", "failed.txt", failedMigration) && isOk2;
isOk2 = reportPaths("Reports of non-JCas files", "NonJCasFiles.txt", nonJCasFiles) && isOk2;
isOk2 = reportPaths("Builtin JCas classes - skipped - need manual checking to see if they are modified",
"skippedBuiltins.txt", skippedBuiltins) && isOk2;
// these reports, if non-empty, do not imply OK issues
reportPaths("Reports of updated Jars", "jarFileUpdates.txt", jarClassReplace);
reportPaths("Reports of updated PEARs", "pearFileUpdates.txt", pearClassReplace);
// computeDuplicates();
// reportPaths("Report of duplicates - not identical", "nonIdenticalDuplicates.txt", nonIdenticalDuplicates);
// reportPaths("Report of duplicates - identical", "identicalDuplicates.txt", identicalDuplicates);
// isOk2 = reportDuplicates() && isOk2; // false if non-identical duplicates
return isOk2;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private void pprintRoots(String kind, Container[] roots) {
if (roots != null && roots.length > 0) {
try {
try (BufferedWriter bw = Files.newBufferedWriter(makePath(outDirLog + "ItemsProcessed"), StandardOpenOption.CREATE)) {
logPrintNl(kind + " Roots:", bw);
indent[0] += 2;
try {
for (Container container : roots) {
pprintContainer(container, bw);
}
logPrintNl("", bw);
} finally {
indent[0] -= 2;
}
}
} catch (IOException e) {
throw new UIMARuntimeException(e);
}
}
}
private void pprintContainer(Container container, BufferedWriter bw) throws IOException {
logPrintNl(container.toString(), bw);
if (container.subContainers.size() > 1) {
logPrintNl("", bw);
indent[0] += 2;
for (Container subc : container.subContainers) {
pprintContainer(subc, bw);
}
}
}
// private void computeDuplicates() {
// List<ClassnameAndPath> toCheck = new ArrayList<>(c2ps);
// toCheck.addAll(extendableBuiltins);
// sortReport2(toCheck);
// ClassnameAndPath prevP = new ClassnameAndPath(null, null);
// List<ClassnameAndPath> sameList = new ArrayList<>();
// boolean areAllEqual = true;
//
// for (ClassnameAndPath p : toCheck) {
// if (!p.getFirst().equals(prevP.getFirst())) {
//
// addToIdenticals(sameList, areAllEqual);
// sameList.clear();
// areAllEqual = true;
//
// prevP = p;
// continue;
// }
//
// // have 2nd or subsequent same class
// if (sameList.size() == 0) {
// sameList.add(prevP);
// }
// sameList.add(p);
// if (areAllEqual) {
// if (isFilesMiscompare(p.path, prevP.path)) {
// areAllEqual = false;
// }
// }
// }
//
// addToIdenticals(sameList, areAllEqual);
// }
// /**
// * Compare two java source or class files
// * @param p1
// * @param p2
// * @return
// */
// private boolean isFilesMiscompare(Path p1, Path p2) {
// String s1 = (p1);
// String s2 = (p2);
// return !s1.equals(s2);
// }
// private void addToIdenticals(List<ClassnameAndPath> sameList, boolean areAllEqual) {
// if (sameList.size() > 0) {
// if (areAllEqual) {
// identicalDuplicates.addAll(sameList);
// } else {
// nonIdenticalDuplicates.addAll(sameList);
// }
// }
// }
/**
*
   * @param name the path string naming the file; parent directories are created as needed
* @return a path made from name, with directories created
* @throws IOException
*/
private Path makePath(String name) throws IOException {
Path p = Paths.get(name);
Path parent = p.getParent(); // all the parts of the path up to the final segment
if (parent == null) {
return p;
}
try {
Files.createDirectories(parent);
} catch (FileAlreadyExistsException e) { // parent already exists but is not a directory!
// caused by running on Windows system which ignores "case"
// there's a file at /x/y/ named "z", but the path wants to be /x/y/Z/
// Workaround: change "z" to "z_c" c for capitalization issue
current_container.haveDifferentCapitalizedNamesCollidingOnWindows = true;
Path fn = parent.getFileName();
if (fn == null) {
throw new IllegalArgumentException();
}
String newDir = fn.toString() + "_c";
Path parent2 = parent.getParent();
Path p2 = parent2 == null ? Paths.get(newDir) : Paths.get(parent2.toString(), newDir);
try {
Files.createDirectories(p2);
} catch (FileAlreadyExistsException e2) { // parent already exists but is not a directory!
throw new RuntimeException(e2);
}
reportPathWorkaround(parent.toString(), p2.toString());
Path lastPartOfPath = p.getFileName();
if (null == lastPartOfPath) throw new RuntimeException();
return Paths.get(p2.toString(), lastPartOfPath.toString());
}
return p;
}
private void logPrint(String msg, Writer bw) throws IOException {
System.out.print(msg);
bw.write(msg);
}
private void logPrintNl(String msg, Writer bw) throws IOException {
logPrint(msg, bw);
logPrint("\n", bw);
}
/**
* prints "There were no xxx" if there are no items.
* prints a title, followed by a ================== underneath it
*
* prints a sorted report of two fields.
*
* @param title title of report
   * @param fileName file name to save the report in (as well as printed to System.out)
   * @param items the set of items to report on
* @return true if items were empty
* @throws IOException -
*/
private <T, U> boolean reportPaths(String title, String fileName, List<? extends Report2<T, U>> items) throws IOException {
if (items.size() == 0) {
System.out.println("There were no " + title);
return true;
}
System.out.println("\n" + title);
for (int i = 0; i < title.length(); i++) System.out.print('=');
System.out.println("");
try (BufferedWriter bw = Files.newBufferedWriter(makePath(outDirLog + fileName), StandardOpenOption.CREATE)) {
List<Report2<T, U>> sorted = new ArrayList<>(items);
sortReport2(sorted);
int max = 0;
int nbrFirsts = 0;
Object prevFirst = null;
for (Report2<T, U> p : sorted) {
max = Math.max(max, p.getFirstLength());
Comparable<T> first = p.getFirst();
if (first != prevFirst) {
prevFirst = first;
nbrFirsts ++;
}
}
/**
* Two styles.
* Style 1: where nbrFirst <= 25% nbr: first on separate line, seconds indented
* Style 2: firsts and seconds on same line.
*/
int i = 1;
boolean style1 = nbrFirsts <= sorted.size() / 4;
prevFirst = null;
for (Report2<T, U> p : sorted) {
if (style1) {
if (prevFirst != p.getFirst()) {
prevFirst = p.getFirst();
logPrintNl(String.format("\n For: %s", p.getFirst()), bw);
}
logPrintNl(String.format(" %5d %s", i, p.getSecond()), bw);
} else {
logPrintNl(String.format("%5d %-" +max+ "s %s", i, p.getFirst(), p.getSecond()), bw);
}
i++;
}
System.out.println("");
} // end of try-with-resources
return false;
}
private boolean isZipFs(Object o) {
    // Surprise! sometimes o is not an instance of FileSystem but is the zipfs anyway
return o.getClass().getName().contains("zipfs"); // java 8 and 9
}
/**
* Sort the items on first, then second
* @param items
*/
private <T, U> void sortReport2(List<? extends Report2<T, U>> items) {
items.sort((o1, o2) -> {
int r = protectedCompare(o1.getFirst(), o2.getFirst());
if (r == 0) {
r = protectedCompare(o1.getSecond(), o2.getSecond());
}
return r;
});
}
/**
* protect against comparing zip fs with non-zip fs - these are not comparable to each other in IBM Java 8
* @return -
*/
private <T> int protectedCompare(Comparable<T> comparable, Comparable<T> comparable2) {
//debug
try {
if (isZipFs(comparable)) {
if (isZipFs(comparable2)) {
return comparable.compareTo((T) comparable2); // both zip
} else {
return 1;
}
} else {
if (isZipFs(comparable2)) {
return -1;
} else {
return comparable.compareTo((T) comparable2); // both not zip
}
}
} catch (ClassCastException e) {
//debug
System.out.format("Internal error: c1: %b c2: %b%n c1: %s%n c2: %s%n", isZipFs(comparable), isZipFs(comparable2), comparable.getClass().getName(), comparable2.getClass().getName());
throw e;
}
}
/**
   * Called only for top-level roots.  Sub containers are reached recursively, via getCandidates_processFile2.
*
* Walk the directory tree rooted at root
* - descend subdirectories
* - descend Jar file
* -- descend nested Jar files (!)
* by extracting these to a temp dir, and keeping a back reference to where they were extracted from.
*
* output the paths representing the classes to migrate:
* classes having a _Type partner
   *     excluding things other than .java or .class, and excluding anything with "$" in the name
* - the path includes the "file system".
   * @param container the root container to walk
*/
private void getAndProcessCandidatesInContainer(Container container) {
// current_paths2RootIds = top_paths2RootIds; // don't do lower, that's called within Jars etc.
if (container.isSingleJavaSource) {
getCandidates_processFile2(container.root, container);
} else {
try (Stream<Path> stream = Files.walk(container.root, FileVisitOption.FOLLOW_LINKS)) { // needed to release file handles
stream.forEachOrdered(
// only puts into the RootIds possible Fqcn (ending in either .class or .java)
p -> getCandidates_processFile2(p, container));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// walk from root container, remove items not JCas candidates
// prunes empty rootIds and subContainer nodes
removeNonJCas(container);
if (container.candidates.size() == 0 &&
container.subContainers.size() == 0) { // above call might remove all candidates
Container parent = container.parent;
if (parent != null) {
// System.out.println("No Candidates found, removing container: " + container.toString() );
// // debug
// System.out.println("debug: " + container.rootOrig.toString());
parent.subContainers.remove(container);
}
return;
}
si(psb).append("Migrating JCas files ");
psb.append( container.isJar
? "in Jar: "
: container.isPear
? "in Pear: "
: "from root: ");
psb.append(container.rootOrig);
indent[0] += 2;
si(psb);
flush(psb);
try {
for (Path path : container.candidates) {
CommonConverted cc = getSource(path, container);
// migrate checks to see if already done, outputs a "." or some other char for the candidate
migrate(cc, container, path);
//defer any compilation to container level
if ((itemCount % 50) == 0) {
psb.append(" ").append(itemCount);
si(psb);
flush(psb);
}
itemCount++;
}
psb.append(" ").append(itemCount - 1);
flush(psb);
if (isSource) {
return; // done
}
if (!isSource &&
!container.haveDifferentCapitalizedNamesCollidingOnWindows &&
javaCompiler != null) {
boolean somethingCompiled = compileV3SourcesCommon2(container);
if (container.isPear || container.isJar) {
if (somethingCompiled) {
postProcessPearOrJar(container);
}
}
return;
}
unableToCompile = true;
return; // unable to do post processing or compiling
} finally {
indent[0] -= 2;
}
}
// removes nonJCas candidates
private void removeNonJCas(Container container) {
Iterator<Path> it = container.candidates.iterator();
while (it.hasNext()) {
String candidate = it.next().toString();
// remove non JCas classes
// //debug
// System.out.println("debug, testing to remove: " + candidate);
// if (candidate.indexOf("Corrected") >= 0) {
// if (!container._Types.contains(candidate)) {
// System.out.println("debug dumping _Types map keys to see why ... Corrected.class not there");
// System.out.println("debug key is=" + candidate);
// System.out.println("keys are:");
// int i = 0;
// for (String k : container._Types) {
// if (i == 4) {
// i = 0;
// System.out.println("");
// }
// System.out.print(k + ", ");
// }
// } else {
// System.out.println("debug container._Types did contain " + candidate);
// }
// }
if (!container.isSingleJavaSource && !container._Types.contains(candidate)) {
it.remove();
}
}
}
/**
   * Called from the Stream walker, starting at a root or at an embedded Jar or Pear.
*
* adds all the .java or .class files to the candidates, including _Type if not skipping the _Type check
*
* Handling embedded jar files
* - single level Jar (at the top level of the default file system)
* -- handle using an overlayed file system
* - embedded Jars within Jars:
* - not supported by Zip File System Provider (it only supports one level)
* - handle by extracting to a temp dir, and then using the Zip File System Provider
*
* For PEARs, check for and disallow nested PEARs; install the PEAR, set the pear classpath for
* recursive processing with the Pear.
*
* For Jar and PEAR files, use local variable + recursive call to update current_paths2RootIds map
   *   to a new one for the Jar / Pear, and then process recursively
*
* @param path the path to a .java or .class or .jar or .pear that was walked to
* @param container the container for the
* - rootIds (which have the JCas candidates) and
   *    - subContainers for embedded Pears and Jars
*/
private void getCandidates_processFile2(Path path, Container container) {
String pathString = path.toString();
final boolean isPear = pathString.endsWith(".pear"); // path.endsWith does not mean this !!
final boolean isJar = pathString.endsWith(".jar");
if (isPear || isJar) {
Container subc = new Container(container, path);
getAndProcessCandidatesInContainer(subc);
return;
}
if (pathString.endsWith(isSource ? ".java" : ".class")) {
// Skip candidates except .java or .class
addToCandidates(path, container);
}
}
/**
* if _Type kind, add artifactId to set kept in current rootIdContainer
* If currently scanning within a PEAR,
* record 2-way map from unzipped path to internal path inside pear
* Used when doing pear reassembly.
*
* If currently scanning within a Jar or a PEAR,
* add unzipped path to list of all subparts for containing Jar or PEAR
* These paths are used as unique ids to things needing to be replaced in the Jar or PEAR,
* when doing re-assembly. For compiled classes migration, only, since source migration
* doesn't do re-assembly.
*
   * @param path -
   * @param container -
*/
private void addToCandidates(Path path, Container container) {
String ps = path.toString();
if (ps.endsWith(isSource ? "_Type.java" : "_Type.class")) {
container._Types.add(isSource
? (ps.substring(0, ps.length() - 10) + ".java")
: (ps.substring(0, ps.length() - 11) + ".class"));
// if (container.isJar) {
// System.out.println("debug add container._Types " + Paths.get(ps.substring(0, ps.length() - 11)).toString() + ".class".toString() + " for Jar " + container.rootOrig.getFileName().toString());
// }
return;
}
if (ps.contains("$")) {
return; // don't add these kinds of things, they're not JCas classes
}
//debug
// if (container.isJar) {
// System.out.println("debug add candidate " + path.toString() + " for Jar " + container.rootOrig.getFileName().toString());
// }
container.candidates.add(path);
}
/**
* For Jars inside other Jars, we copy the Jar to a temp spot in the default file system
* Extracted Jar is marked delete-on-exit
*
* @param path embedded Jar to copy (only the last name is used, in constructing the temp dir)
* @return a temporary file in the local temp directory that is a copy of the Jar
* @throws IOException -
*/
private static Path getTempOutputPathForJarOrPear(Path path) throws IOException {
Path localTempDir = getTempDir();
if (path == null ) {
throw new IllegalArgumentException();
}
Path fn = path.getFileName();
if (fn == null) {
throw new IllegalArgumentException();
}
Path tempPath = Files.createTempFile(localTempDir, fn.toString(), "");
tempPath.toFile().deleteOnExit();
return tempPath;
}
private static Path getTempDir() throws IOException {
if (tempDir == null) {
tempDir = Files.createTempDirectory("migrateJCas");
tempDir.toFile().deleteOnExit();
}
return tempDir;
}
private static final CommandLineParser createCmdLineParser() {
CommandLineParser parser = new CommandLineParser();
parser.addParameter(SOURCE_FILE_ROOTS, true);
parser.addParameter(CLASS_FILE_ROOTS, true);
parser.addParameter(OUTPUT_DIRECTORY, true);
// parser.addParameter(SKIP_TYPE_CHECK, false);
parser.addParameter(MIGRATE_CLASSPATH, true);
// parser.addParameter(CLASSES, true);
return parser;
}
private final boolean checkCmdLineSyntax(CommandLineParser clp) {
if (clp.getRestArgs().length > 0) {
System.err.println("Error parsing CVD command line: unknown argument(s):");
String[] args = clp.getRestArgs();
for (int i = 0; i < args.length; i++) {
System.err.print(" ");
System.err.print(args[i]);
}
System.err.println();
return false;
}
if (!clp.isInArgsList(SOURCE_FILE_ROOTS) && !clp.isInArgsList(CLASS_FILE_ROOTS)) {
System.err.println("Neither sources file roots nor classes file roots parameters specified; please specify just one.");
return false;
}
if (clp.isInArgsList(SOURCE_FILE_ROOTS) && clp.isInArgsList(CLASS_FILE_ROOTS)) {
System.err.println("both sources file roots and classes file roots parameters specified; please specify just one.");
return false;
}
if (clp.isInArgsList(OUTPUT_DIRECTORY)) {
outputDirectory = Paths.get(clp.getParamArgument(OUTPUT_DIRECTORY)).toString();
if (!outputDirectory.endsWith("/")) {
outputDirectory = outputDirectory + "/";
}
} else {
try {
outputDirectory = Files.createTempDirectory("migrateJCasOutput").toString() + "/";
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
outDirConverted = outputDirectory + "converted/";
outDirSkipped = outputDirectory + "not-converted/";
outDirLog = outputDirectory + "logs/";
if (clp.isInArgsList(MIGRATE_CLASSPATH)) {
migrateClasspath = clp.getParamArgument(MIGRATE_CLASSPATH);
} else {
if (clp.isInArgsList(CLASS_FILE_ROOTS)) {
System.err.println("WARNING: classes file roots is specified, but the\n"
+ " migrateClasspath parameter is missing\n");
}
}
// if (clp.isInArgsList(CLASSES)) {
// individualClasses = clp.getParamArgument(CLASSES);
// }
return true;
}
// called to decompile a string of bytes.
// - first get the class name (fully qualified)
// and skip decompiling if already decompiled this class
// for this pearClasspath
// - this handles multiple class definitions, insuring
// only one decompile happens per pearClasspath (including null)
/**
* Caller does any caching to avoid this method.
*
* @param b bytecode to decompile
* @param pearClasspath to prepend to the classpath
   * @return the decompiled source, as a String
*/
private String decompile(byte[] b, String pearClasspath) {
badClassName = false;
String classNameWithSlashes = Misc.classNameFromByteCode(b);
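    // side effect relied on by callers (see getSource): record the fully qualified (slashified) class name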
packageAndClassNameSlash = classNameWithSlashes;
ClassLoader cl = getClassLoader(pearClasspath);
UimaDecompiler ud = new UimaDecompiler(cl, null);
if (classNameWithSlashes == null || classNameWithSlashes.length() < 2) {
System.err.println("Failed to extract class name from binary code, "
+ "name found was \"" + ((classNameWithSlashes == null) ? "null" : classNameWithSlashes)
+ "\"\n byte array was:");
System.err.println(Misc.dumpByteArray(b, 2000));
badClassName = true;
}
return ud.decompileToString(classNameWithSlashes, b);
}
/**
* The classloader to use in decompiling, if it is provided, is one that delegates first
* to the parent. This may need fixing for PEARs
* @return classloader to use for migrate decompiling
*/
private ClassLoader getClassLoader(String pearClasspath) {
if (null == pearClasspath) {
if (null == cachedMigrateClassLoader) {
cachedMigrateClassLoader = (null == migrateClasspath)
? this.getClass().getClassLoader()
: new UIMAClassLoader(Misc.classpath2urls(migrateClasspath));
}
return cachedMigrateClassLoader;
} else {
try {
return new UIMAClassLoader((null == migrateClasspath)
? pearClasspath
: (pearClasspath + File.pathSeparator + migrateClasspath));
} catch (MalformedURLException e) {
throw new UIMARuntimeException(e);
}
}
}
private void addImport(String s) {
cu.getImports().add(new ImportDeclaration(new Name(s), false, false));
}
private void removeImport(String s) {
Iterator<ImportDeclaration> it = cu.getImports().iterator();
while (it.hasNext()) {
ImportDeclaration impDcl = it.next();
if (impDcl.getNameAsString().equals(s)) {
it.remove();
break;
}
}
}
/******************
* AST Utilities
******************/
private Node replaceInParent(Node n, Expression v) {
Optional<Node> maybeParent = n.getParentNode();
if (maybeParent.isPresent()) {
Node parent = n.getParentNode().get();
if (parent instanceof EnclosedExpr) {
((EnclosedExpr)parent).setInner(v);
} else if (parent instanceof MethodCallExpr) { // args in the arg list
List<Expression> args = ((MethodCallExpr)parent).getArguments();
args.set(args.indexOf(n), v);
v.setParentNode(parent);
} else if (parent instanceof ExpressionStmt) {
((ExpressionStmt)parent).setExpression(v);
} else if (parent instanceof CastExpr) {
((CastExpr)parent).setExpression(v);
} else if (parent instanceof ReturnStmt) {
((ReturnStmt)parent).setExpression(v);
} else if (parent instanceof AssignExpr) {
((AssignExpr)parent).setValue(v);
} else if (parent instanceof VariableDeclarator) {
((VariableDeclarator)parent).setInitializer(v);
} else if (parent instanceof ObjectCreationExpr) {
List<Expression> args = ((ObjectCreationExpr)parent).getArguments();
int i = args.indexOf(n);
if (i < 0) throw new RuntimeException();
args.set(i, v);
} else {
System.out.println(parent.getClass().getName());
throw new RuntimeException();
}
return v;
}
System.out.println("internal error replacing in parent: no parent for node: " + n.getClass().getName());
System.out.println(" node: " + n.toString());
System.out.println(" expression replacing: " + v.toString());
throw new RuntimeException();
}
  /**
   * Set the type and name of one parameter in a parameter list.
   *
   * @param ps the list holding the parameter to modify
   * @param i the index of the parameter to modify
   * @param t the name of the class or interface to use as the type
   * @param name the name of the variable
   */
private void setParameter(List<Parameter> ps, int i, String t, String name) {
Parameter p = ps.get(i);
p.setType(new ClassOrInterfaceType(t));
p.setName(new SimpleName(name));
}
private int findConstructor(NodeList<BodyDeclaration<?>> classMembers) {
int i = 0;
for (BodyDeclaration<?> bd : classMembers) {
if (bd instanceof ConstructorDeclaration) {
return i;
}
i++;
}
return -1;
}
private boolean hasTypeFields(NodeList<BodyDeclaration<?>> members) {
boolean hasType = false;
boolean hasTypeId = false;
for (BodyDeclaration<?> bd : members) {
if (bd instanceof FieldDeclaration) {
FieldDeclaration f = (FieldDeclaration)bd;
EnumSet<Modifier> m = f.getModifiers();
if (m.contains(Modifier.PUBLIC) &&
m.contains(Modifier.STATIC) &&
m.contains(Modifier.FINAL)
// &&
// getTypeName(f.getType()).equals("int")
) {
List<VariableDeclarator> vds = f.getVariables();
for (VariableDeclarator vd : vds) {
if (vd.getType().equals(intType)) {
String n = vd.getNameAsString();
if (n.equals("type")) hasType = true;
if (n.equals("typeIndexID")) hasTypeId = true;
if (hasTypeId && hasType) {
return true;
}
}
}
}
}
} // end of for
return false;
}
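  // Illustrative sketch (not part of the original source): hasTypeFields above looks for the two
  // public static final int fields that v2-generated JCas classes typically carry, e.g. something like
  //   public final static int typeIndexID = JCasRegistry.register(MyAnnotation.class);
  //   public final static int type = typeIndexID;
  // The class name MyAnnotation and the initializers are assumptions for illustration; the check only
  // requires public static final int fields named "type" and "typeIndexID".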
/**
* Heuristic:
* JCas classes have 0, 1, and 2 arg constructors with particular arg types
* 0 -
* 1 - JCas
* 2 - int, TOP_Type (v2) or TypeImpl, CASImpl (v3)
*
* Additional 1 and 2 arg constructors are permitted.
*
* Sets fields hasV2Constructors, hasV3Constructors
* @param members
*/
private void setHasJCasConstructors(NodeList<BodyDeclaration<?>> members) {
boolean has0ArgConstructor = false;
boolean has1ArgJCasConstructor = false;
boolean has2ArgJCasConstructorV2 = false;
boolean has2ArgJCasConstructorV3 = false;
for (BodyDeclaration<?> bd : members) {
if (bd instanceof ConstructorDeclaration) {
List<Parameter> ps = ((ConstructorDeclaration)bd).getParameters();
if (ps.size() == 0) has0ArgConstructor = true;
if (ps.size() == 1 && getParmTypeName(ps, 0).equals("JCas")) {
has1ArgJCasConstructor = true;
}
if (ps.size() == 2) {
if (getParmTypeName(ps, 0).equals("int") &&
getParmTypeName(ps, 1).equals("TOP_Type")) {
has2ArgJCasConstructorV2 = true;
} else if (getParmTypeName(ps, 0).equals("TypeImpl") &&
getParmTypeName(ps, 1).equals("CASImpl")) {
has2ArgJCasConstructorV3 = true;
}
} // end of 2 arg constructor
} // end of is-constructor
} // end of for loop
hasV2Constructors = has0ArgConstructor && has1ArgJCasConstructor && has2ArgJCasConstructorV2;
hasV3Constructors = has0ArgConstructor && has1ArgJCasConstructor && has2ArgJCasConstructorV3;
}
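  // Illustrative sketch (not part of the original source): for a hypothetical JCas class Foo, the
  // constructor shapes this heuristic matches are roughly
  //   public Foo() {}                                                       // 0-arg
  //   public Foo(JCas jcas) { super(jcas); }                                // 1-arg JCas
  //   public Foo(int addr, TOP_Type type) { super(addr, type); }            // 2-arg, v2 style
  //   public Foo(TypeImpl type, CASImpl casImpl) { super(type, casImpl); }  // 2-arg, v3 style
  // Foo and the super(...) bodies are made up; only the parameter type names are what
  // setHasJCasConstructors actually checks.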
private String getParmTypeName(List<Parameter> p, int i) {
return getTypeName(p.get(i).getType());
}
private String getTypeName(Type t) {
// if (t instanceof ReferenceType) {
// t = ((ReferenceType<?>)t).getType();
// }
if (t instanceof PrimitiveType) {
return ((PrimitiveType)t).toString();
}
if (t instanceof ClassOrInterfaceType) {
return ((ClassOrInterfaceType)t).getNameAsString();
}
Misc.internalError(); return null;
}
/**
* Get the name of a field
* @param e -
* @return the field name or null
*/
private String getName(Expression e) {
e = getUnenclosedExpr(e);
if (e instanceof NameExpr) {
return ((NameExpr)e).getNameAsString();
}
if (e instanceof FieldAccessExpr) {
return ((FieldAccessExpr)e).getNameAsString();
}
return null;
}
  /**
   * Called on Annotation Decl, Class/intfc decl, empty type decl, enum decl.
   * Does nothing unless the declaration is at the top level of the compilation unit;
   * in that case, adds an entry to c2ps for the classname and package, plus full path.
   *
   * @param n type being declared
   */
private void updateClassName(TypeDeclaration<?> n) {
Optional<Node> pnode = n.getParentNode();
Node node;
if (pnode.isPresent() &&
(node = pnode.get()) instanceof CompilationUnit) {
CompilationUnit cu2 = (CompilationUnit) node;
className = cu2.getType(0).getNameAsString();
String packageAndClassName =
(className.contains("."))
? className
: packageName + '.' + className;
packageAndClassNameSlash = packageAndClassName.replace('.', '/');
// assert current_cc.fqcn_slash == null; // for decompiling, already set
assert (current_cc.fqcn_slash != null) ? current_cc.fqcn_slash.equals(packageAndClassNameSlash) : true;
current_cc.fqcn_slash = packageAndClassNameSlash;
TypeImpl ti = TypeSystemImpl.staticTsi.getType(Misc.javaClassName2UimaTypeName(packageAndClassName));
if (null != ti) {
// is a built-in type
// ContainerAndPath p = new ContainerAndPath(
// current_path,
// current_container,packageAndClassNameSlash,
// current_cc.,
// current_cc.pearClasspath);
skippedBuiltins.add(new PathContainerAndReason(current_path, current_container, "built-in"));
isBuiltinJCas = true;
isConvert2v3 = false;
return;
} else {
VariableDeclarator vd_typename = new VariableDeclarator(
stringType, "_TypeName", new StringLiteralExpr(packageAndClassName));
fi_fields.add(new FieldDeclaration(public_static_final, vd_typename));
}
return;
}
return;
}
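  // Illustrative sketch (not part of the original source): for a hypothetical top-level class Foo in
  // package org.example, updateClassName sets packageAndClassNameSlash to "org/example/Foo".
  // Built-in UIMA types are recorded in skippedBuiltins and excluded from conversion; other types get
  // a _TypeName constant holding "org.example.Foo". The package and class names are made up here.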
private Expression getExpressionFromStmt(Statement stmt) {
stmt = getStmtFromStmt(stmt);
if (stmt instanceof ExpressionStmt) {
return getUnenclosedExpr(((ExpressionStmt)stmt).getExpression());
}
return null;
}
private Expression getUnenclosedExpr(Expression e) {
while (e instanceof EnclosedExpr) {
e = ((EnclosedExpr)e).getInner().get();
}
return e;
}
/**
* Unwrap (possibly nested) 1 statement blocks
* @param stmt -
* @return unwrapped (non- block) statement
*/
private Statement getStmtFromStmt(Statement stmt) {
while (stmt instanceof BlockStmt) {
NodeList<Statement> stmts = ((BlockStmt) stmt).getStatements();
if (stmts.size() == 1) {
stmt = stmts.get(0);
continue;
}
return null;
}
return stmt;
}
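  // Illustrative sketch (not part of the original source): getStmtFromStmt unwraps nested
  // single-statement blocks, so a statement like
  //   { { return x; } }
  // comes back as the inner "return x;", while a block containing more than one statement yields null.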
private void addCastExpr(Statement stmt, Type castType) {
ReturnStmt rstmt = (ReturnStmt) stmt;
Optional<Expression> o_expr = rstmt.getExpression();
Expression expr = o_expr.isPresent() ? o_expr.get() : null;
CastExpr ce = new CastExpr(castType, expr);
rstmt.setExpression(ce); // removes the parent link from expr
if (expr != null) {
expr.setParentNode(ce); // restore it
}
}
/********************
* Recording results
********************/
private void recordBadConstructor(String msg) {
reportMigrateFailed("Constructor is incorrect, " + msg);
}
// private void reportParseException() {
// reportMigrateFailed("Unparsable Java");
// }
private void migrationFailed(String reason) {
failedMigration.add(new PathContainerAndReason(current_path, current_container, reason));
isConvert2v3 = false;
}
private void reportMigrateFailed(String m) {
System.out.format("Skipping this file due to error: %s, path: %s%n", m, current_path);
migrationFailed(m);
}
private void reportV2Class() {
// v2JCasFiles.add(current_path);
isV2JCas = true;
}
private void reportV3Class() {
// v3JCasFiles.add(current_path);
isConvert2v3 = true;
}
private void reportNotJCasClass(String reason) {
nonJCasFiles.add(new PathContainerAndReason(current_path, current_container, reason));
isConvert2v3 = false;
}
private void reportNotJCasClassMissingTypeFields() {
reportNotJCasClass("missing required type and/or typeIndexID static fields");
}
private void reportDeletedCheckModified(String m) {
deletedCheckModified.add(new PathContainerAndReason(current_path, current_container, m));
}
private void reportMismatchedFeatureName(String m) {
manualInspection.add(new PathContainerAndReason(current_path, current_container,
"This getter/setter name doesn't match internal feature name: " + m));
}
private void reportUnrecognizedV2Code(String m) {
migrationFailed("V2 code not recognized:\n" + m);
}
private void reportPathWorkaround(String orig, String modified) {
pathWorkaround.add(new String1AndString2(orig, modified));
}
private void reportPearOrJarClassReplace(String pearOrJar, String classname, Container kind) { // pears or jars
if (kind.isPear) {
pearClassReplace.add(new String1AndString2(pearOrJar, classname));
} else {
jarClassReplace.add(new String1AndString2(pearOrJar, classname));
}
}
/***********************************************/
/**
* Output directory for source and migrated files
* Consisting of converted/skipped, v2/v3, a+cc.id, slashified classname
* @param cc -
* @param isV2 -
* @param wasConverted -
* @return converted/skipped, v2/v3, a+cc.id, slashified classname
*/
private String getBaseOutputPath(CommonConverted cc, boolean isV2, boolean wasConverted) {
StringBuilder sb = new StringBuilder();
sb.append(wasConverted ? outDirConverted : outDirSkipped);
sb.append(isV2 ? "v2/" : "v3/");
sb.append("a").append(cc.getId()).append('/');
sb.append(cc.fqcn_slash).append(".java");
return sb.toString();
}
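  // Illustrative sketch (not part of the original source): for a hypothetical CommonConverted with
  // id 0 and fqcn_slash "org/example/Foo", a converted v3 result is written under
  //   <outDirConverted>v3/a0/org/example/Foo.java
  // and the corresponding v2 original under
  //   <outDirConverted>v2/a0/org/example/Foo.java
  // (or under outDirSkipped when wasConverted is false). The class name is made up for illustration.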
private void writeV2Orig(CommonConverted cc, boolean wasConverted) throws IOException {
String base = getBaseOutputPath(cc, true, wasConverted); // adds numeric suffix if dupls
FileUtils.writeToFile(makePath(base), cc.v2Source);
}
private void writeV3(CommonConverted cc) throws IOException {
String base = getBaseOutputPath(cc, false, true);
cc.v3SourcePath = makePath(base);
String data = fixImplementsBug(cc.v3Source);
FileUtils.writeToFile(cc.v3SourcePath, data);
}
private void printUsage() {
System.out.println(
"Usage: java org.apache.uima.migratev3.jcas.MigrateJCas \n"
+ " [-sourcesRoots <One-or-more-directories-or-jars-separated-by-Path-separator, or a path to a single JCas source class>]\n"
+ " [-classesRoots <One-or-more-directories-or-jars-or-pears-separated-by-Path-separator>]\n"
+ " [-outputDirectory a-writable-directory-path (optional)\n"
+ " if omitted, a temporary directory is used\n"
+ " if not omitted, the directory contents WILL BE ERASED at the start.\n"
+ " [-migrateClasspath a-class-path to use in decompiling, when -classesRoots is specified\n"
+ " also used when compiling the migrated classes.\n"
+ " NOTE: either -sourcesRoots or -classesRoots is required, but only one may be specified.\n"
+ " NOTE: classesRoots are scanned for JCas classes, which are then decompiled, and the results processed like sourcesRoots\n"
);
}
private static final Pattern implementsEmpty = Pattern.compile("implements \\{");
private String fixImplementsBug(String data) {
return implementsEmpty.matcher(data).replaceAll("{");
}
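  // Illustrative sketch (not part of the original source): the decompiled source apparently can
  // contain an empty implements clause, so fixImplementsBug rewrites a line such as
  //   public class Foo implements {
  // into
  //   public class Foo {
  // The class name Foo is made up for illustration.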
/*********************************************************************
* Reporting classes
*********************************************************************/
private static abstract class Report2<T, U> {
public abstract Comparable<T> getFirst(); // Eclipse on linux complained if not public, was OK on windows
public abstract Comparable<U> getSecond();
abstract int getFirstLength();
}
private static class PathContainerAndReason extends Report2<ContainerAndPath, String> {
final ContainerAndPath cap;
final String reason;
PathContainerAndReason(ContainerAndPath cap, String reason) {
this.cap = cap;
this.reason = reason;
}
PathContainerAndReason(Path path, Container container, String reason) {
this(new ContainerAndPath(path, container), reason);
}
@Override
public Comparable<ContainerAndPath> getFirst() { return cap; }
@Override
public Comparable<String> getSecond() { return reason; }
@Override
int getFirstLength() { return cap.toString().length(); }
}
private static class String1AndString2 extends Report2<String, String> {
String s1;
String s2;
String1AndString2(String s1, String s2) {
this.s1 = s1;
this.s2 = s2;
}
@Override
public Comparable<String> getFirst() { return s1; }
@Override
public Comparable<String> getSecond() { return s2; }
@Override
int getFirstLength() { return s1.toString().length(); }
}
private static void withIOX(Runnable_withException r) {
try {
r.run();
} catch (Exception e) {
throw new UIMARuntimeException(e);
}
}
private int findFirstCharDifferent(String s1, String s2) {
int s1l = s1.length();
int s2l = s2.length();
for (int i = 0;;i++) {
if (i == s1l || i == s2l) {
return i;
}
if (s1.charAt(i) != s2.charAt(i)) {
return i;
}
}
}
// private String drop_Type(String s) {
// return s.substring(0, isSource ? "_Type.java".length()
// : "_Type.class".length()) +
// (isSource ? ".java" : ".class");
// }
///*****************
//* Root-id
//*****************/
//private static int nextRootId = 1;
//
///***********************************************************************
//* Root-id - this is the path part up to the start of the package name.
//* - it is relative to container
//* - has the collection of artifacts that might be candidates, having this rootId
//* - has the collection of _Type things having this rootId
//* - "null" path is OK - means package name starts immediately
//* There is no Root-id for path ending in Jar or PEAR - these created containers instead
//***********************************************************************/
//private static class RootId {
// final int id = nextRootId++;
// /**
// * The path relative to the the container (if any) (= Jar or Pear)
// * - for Pears, the path is as if it was not installed, but within the PEAR file
// */
// final Path path;
//
// /** The container holding this RootId */
// final Container container;
// /**
// * For this rootId, all of the fully qualified classnames that are migration eligible.
// * - not all might be migrated, if upon further inspection they are not JCas class files.
// */
// final Set<Fqcn> fqcns = new HashSet<>();
// final Set<String> fqcns_ignore_case = new HashSet<>();
// boolean haveDifferentCapitalizedNamesCollidingOnWindows = false;
//
// RootId(Path path, Container container) {
// this.path = path;
// this.container = container;
// }
//
// /* (non-Javadoc)
// * @see java.lang.Object#toString()
// */
// @Override
// public String toString() {
// return "RootId [id="
// + id
// + ", path="
// + path
// + ", container="
// + container.id
// + ", fqcns="
// + Misc.ppList(Misc.setAsList(fqcns))
// + ", fqcns_Type="
// + Misc.ppList(Misc.setAsList(fqcns_Type))
// + "]";
// }
//
// void add(Fqcn fqcn) {
// boolean wasNotPresent = fqcns.add(fqcn);
// boolean lc = fqcns_ignore_case.add(fqcn.fqcn_dots.toLowerCase());
// if (!lc && wasNotPresent) {
// haveDifferentCapitalizedNamesCollidingOnWindows = true;
// }
// }
//
// boolean hasMatching_Type(Fqcn fqcn) {
//
// }
//}
///**
//* Called from Stream walker starting at a root or starting at an imbedded Jar or Pear.
//*
//* adds all the .java or .class files to the candidates, including _Type if not skipping the _Type check
//* Handling embedded jar files
//* - single level Jar (at the top level of the default file system)
//* -- handle using an overlayed file system
//* - embedded Jars within Jars:
//* - not supported by Zip File System Provider (it only supports one level)
//* - handle by extracting to a temp dir, and then using the Zip File System Provider
//* @param path the path to a .java or .class or .jar or .pear
//* @param pearClasspath - a string representing a path to the pear's classpath if there is one, or null
//*/
//private void getCandidates_processFile(Path path, String pearClasspath) {
//// if (path.toString().contains("commons-httpclient-3.1.jar"))
//// System.out.println("Debug: " + path.toString());
//// System.out.println("debug processing " + path);
// try {
//// URI pathUri = path.toUri();
// String pathString = path.toString();
// final boolean isPear = pathString.endsWith(".pear"); // path.endsWith does not mean this !!
// final boolean isJar = pathString.endsWith(".jar");
//
// if (isJar || isPear) {
// if (!path.getFileSystem().equals(FileSystems.getDefault())) {
// // embedded Pear or Jar: extract to temp
// Path out = getTempOutputPathForJar(path);
// Files.copy(path, out, StandardCopyOption.REPLACE_EXISTING);
//// embeddedJars.add(new PathAndPath(path, out));
// path = out; // path points to pear or jar
// }
//
// Path start;
// final String localPearClasspath;
// if (isPear) {
// if (pearClasspath != null) {
// throw new UIMARuntimeException("Nested PEAR files not supported");
// }
//
//// pear_current = new PearOrJar(path);
//// pears.add(pear_current);
// // add pear classpath info
// File pearInstallDir = Files.createTempDirectory(getTempDir(), "installedPear").toFile();
// PackageBrowser ip = PackageInstaller.installPackage(pearInstallDir, path.toFile(), false);
// localPearClasspath = ip.buildComponentClassPath();
// String[] children = pearInstallDir.list();
// if (children == null || children.length != 1) {
// Misc.internalError();
// }
// pearResolveStart = Paths.get(pearInstallDir.getAbsolutePath(), children[0]);
//
// start = pearInstallDir.toPath();
// } else {
// if (isJar) {
// PearOrJar jarInfo = new PearOrJar(path);
// pear_or_jar_current_stack.push(jarInfo);
// jars.add(jarInfo);
// }
//
// localPearClasspath = pearClasspath;
// FileSystem jfs = FileSystems.newFileSystem(Paths.get(path.toUri()), null);
// start = jfs.getPath("/");
// }
//
// try (Stream<Path> stream = Files.walk(start)) { // needed to release file handles
// stream.forEachOrdered(
// p -> getCandidates_processFile(p, localPearClasspath));
// }
// if (isJar) {
// pear_or_jar_current_stack.pop();
// }
// if (isPear) {
// pear_current = null;
// }
// } else {
// // is not a .jar or .pear file. add .java or .class files to initial candidate set
// // will be filtered additionally later
//// System.out.println("debug path ends with java or class " + pathString.endsWith(isSource ? ".java" : ".class") + " " + pathString);
// if (pathString.endsWith(isSource ? ".java" : ".class")) {
// candidates.add(new Candidate(path, pearClasspath));
// if (!isSource && null != pear_current) {
// // inside a pear, which has been unzipped into pearInstallDir;
// path2InsidePearOrJarPath.put(path.toString(), pearResolveStart.relativize(path).toString());
// pear_current.pathsToCandidateFiles.add(path.toString());
// }
//
// if (!isSource && pear_or_jar_current_stack.size() > 0) {
// // inside a jar, not contained in a pear
// pear_or_jar_current_stack.getFirst().pathsToCandidateFiles.add(path.toString());
// }
// }
// }
// } catch (IOException e) {
// throw new RuntimeException(e);
// }
//}
//private void postProcessPearsOrJars(String kind, List<PearOrJar> pearsOrJars, List<String1AndString2> classReplace) { // pears or jars
//try {
// Path outDir = Paths.get(outputDirectory, kind);
// FileUtils.deleteRecursive(outDir.toFile());
// Files.createDirectories(outDir);
//} catch (IOException e) {
// throw new RuntimeException(e);
//}
//
//// pearsOrJars may have entries with 0 candidate paths. This happens when we scan them
//// but find nothing to convert.
//// eliminate these.
//
//Iterator<PearOrJar> it = pearsOrJars.iterator();
//while (it.hasNext()) {
// PearOrJar poj = it.next();
// if (poj.pathsToCandidateFiles.size() == 0) {
// it.remove();
// } else {
//// //debug
//// if (poj.pathToPearOrJar.toString().contains("commons-httpclient-3.1")) {
//// System.err.println("debug found converted things inside commons-httpclient");;
//// for (String x : poj.pathsToCandidateFiles) {
//// System.err.println(x);
//// }
//// System.err.println("");
//// }
// }
//}
//
//it = pearsOrJars.iterator();
//while (it.hasNext()) {
// PearOrJar poj = it.next();
// if (poj.pathsToCandidateFiles.size() == 0) {
// System.err.print("debug failed to remove unconverted Jar");
// }
//}
//
//if (pearsOrJars.size() == 0) {
// System.out.format("No .class files were replaced in %s.%n", kind);
//} else {
// System.out.format("replacing .class files in %,d %s%n", pearsOrJars.size(), kind);
// for (PearOrJar p : pearsOrJars) {
// pearOrJarPostProcessing(p, kind);
// }
// try {
// reportPaths("Reports of updated " + kind, kind + "FileUpdates.txt", classReplace);
//
// } catch (IOException e) {
// throw new RuntimeException(e);
// }
//}
//
//}
///**
//* When running the compiler to compile v3 sources, we need a classpath that at a minimum
//* includes uimaj-core. The strategy is to use the invoker of this tool's classpath as
//* specified from the application class loader
//* @return true if no errors
//*/
//private boolean compileV3SourcesCommon(List<ClassnameAndPath> items, String msg, String pearClasspath) {
//
// if (items.size() == 0) {
// return true;
// }
// System.out.format("Compiling %,d classes %s -- This may take a while!%n", c2ps.size(), msg);
// StandardJavaFileManager fileManager = javaCompiler.getStandardFileManager(null, null, Charset.forName("UTF-8"));
//
// List<String> cus = items.stream()
// .map(c -> outDirConverted + "v3/" + c.classname + ".java")
// .collect(Collectors.toList());
//
// Iterable<String> compilationUnitStrings = cus;
//
// Iterable<? extends JavaFileObject> compilationUnits =
// fileManager.getJavaFileObjectsFromStrings(compilationUnitStrings);
//
// // specify where the output classes go
// String classesBaseDir = outDirConverted + "v3-classes";
// try {
// Files.createDirectories(Paths.get(classesBaseDir));
// } catch (IOException e) {
// throw new UIMARuntimeException(e);
// }
// // specify the classpath
// String classpath = getCompileClassPath(pearClasspath);
// Iterable<String> options = Arrays.asList("-d", classesBaseDir,
// "-classpath", classpath);
// return javaCompiler.getTask(null, fileManager, null, options, null, compilationUnits).call();
//}
///**
//* Called after class is migrated
//* Given a path to a class (source or class file),
//* return the URL to the class as found in the classpath.
//* This returns the "first" one found in the classpath, in the case of duplicates.
//* @param path
//* @return the location of the class in the class path
//*/
//private URL getPathForClass(Path path) {
// return (null == packageAndClassNameSlash)
// ? null
// : migrateClassLoader.getResource(packageAndClassNameSlash + ".class");
//}
//private void getBaseOutputPath() {
//String s = packageAndClassNameSlash;
//int i = 0;
//while (!usedPackageAndClassNames.add(s)) {
// i = i + 1;
// s = packageAndClassNameSlash + "_dupid_" + i;
//}
//packageAndClassNameSlash_i = i;
//}
//private String prepareIndividual(String classname) {
//candidate = new Candidate(Paths.get(classname)); // a pseudo path
//packageName = null;
//className = null;
//packageAndClassNameSlash = null;
//cu = null;
//return decompile(classname); // always look up in classpath
// // to decompile individual source - put in sourcesRoots
//}
//if (!isSource) { // skip this recording if source
//if (null != pear_current) {
// // inside a pear, which has been unzipped into a temporary pearInstallDir;
// // we don't want that temporary dir to be part of the path.
// path2InsidePearOrJarPath.put(path.toString(), pearResolveStart.relativize(path).toString());
// pear_current.pathsToCandidateFiles.add(path.toString());
//}
//
//if (!isSource && pear_or_jar_current_stack.size() > 0) {
// // inside a jar, not contained in a pear
// pear_or_jar_current_stack.getFirst().pathsToCandidateFiles.add(path.toString());
//}
//}
//}
///**
//* For a given candidate, use its path:
//* switch the ...java to ..._Type.java, or ...class to ..._Type.class
//* look thru all the candidates
//* @param cand
//* @param start
//* @return
//*/
//private boolean has_Type(Candidate cand, int start) {
// if (start >= candidates.size()) {
// return false;
// }
//
// String sc = cand.p.toString();
// String sc_minus_suffix = sc.substring(0, sc.length() - ( isSource ? ".java".length() : ".class".length()));
// String sc_Type = sc_minus_suffix + ( isSource ? "_Type.java" : "_Type.class");
// // a string which sorts beyond the candidate + a suffix of "_"
// String s_end = sc_minus_suffix + (char) (((int)'_') + 1);
// for (Candidate c : candidates.subList(start, candidates.size())) {
// String s = c.p.toString();
// if (s_end.compareTo(s) < 0) {
// return false; // not found, we're already beyond where it would be found
// }
// if (s.equals(sc_Type)) {
// return true;
// }
// }
// return false;
//}
//private final static Comparator<Candidate> pathComparator = new Comparator<Candidate>() {
//@Override
//public int compare(Candidate o1, Candidate o2) {
// return o1.p.toString().compareTo(o2.p.toString());
//}
//};
//// there may be several same-name roots not quite right
//// xxx_Type$1.class
//
//private void addIfPreviousIsSameName(List<Path> c, int i) {
//if (i == 0) return;
//String _Type = candidates.get(i).toString();
////String prev = r.get(i-1).getPath();
//String prefix = _Type.substring(0, _Type.length() - ("_Type." + (isSource ? "java" : "class")).length());
//i--;
//while (i >= 0) {
// String s = candidates.get(i).toString();
// if ( ! s.startsWith(prefix)) {
// break;
// }
// if (s.substring(prefix.length()).equals((isSource ? ".java" : ".class"))) {
// c.add(candidates.get(i));
// break;
// }
// i--;
//}
//}
//
//for (int i = 0; i < pearOrJar.pathsToCandidateFiles.size(); i++) {
// String candidatePath = pearOrJar.pathsToCandidateFiles.get(i);
// String path_in_v3_classes = isPear
// ? getPath_in_v3_classes(candidatePath)
// : candidatePath;
//
// Path src = Paths.get(outputDirectory, "converted/v3-classes", path_in_v3_classes
// + (isPear ? ".class" : ""));
// Path tgt = pfs.getPath(
// "/",
// isPear
// ? path2InsidePearOrJarPath.get(candidatePath) // needs to be bin/org/... etc
// : candidatePath); // needs to be org/... etc
// if (Files.exists(src)) {
// Files.copy(src, tgt, StandardCopyOption.REPLACE_EXISTING);
// reportPearOrJarClassReplace(pearOrJarCopy.toString(), path_in_v3_classes, kind);
// }
//}
///** for compiled mode, do recompiling and reassembly of Jars and Pears */
//
//private boolean compileAndReassemble(CommonConverted cc, Container container, Path path) {
// boolean noErrors = true;
// if (javaCompiler != null) {
// if (container.haveDifferentCapitalizedNamesCollidingOnWindows) {
// System.out.println("Skipping compiling / reassembly because class " + container.toString() + " has multiple names differing only in capitalization, please resolve first.");
// } else {
//
//
// noErrors = compileV3PearSources(container, path);
// noErrors = noErrors && compileV3NonPearSources(container, path);
//
// postProcessPearsOrJars("jars" , jars , jarClassReplace);
// postProcessPearsOrJars("pears", pears, pearClassReplace);
//
////
//// try {
//// Path pearOutDir = Paths.get(outputDirectory, "pears");
//// FileUtils.deleteRecursive(pearOutDir.toFile());
//// Files.createDirectories(pearOutDir);
//// } catch (IOException e) {
//// throw new RuntimeException(e);
//// }
////
//// System.out.format("replacing .class files in %,d PEARs%n", pears.size());
//// for (PearOrJar p : pears) {
//// pearOrJarPostProcessing(p);
//// }
//// try {
//// reportPaths("Reports of updated Pears", "pearFileUpdates.txt", pearClassReplace);
//// } catch (IOException e) {
//// throw new RuntimeException(e);
//// }
// }
// }
//
// return noErrors;
//}
///**
//* @return true if no errors
//*/
//private boolean compileV3PearSources() {
// boolean noError = true;
// Map<String, List<ClassnameAndPath>> p2c = c2ps.stream()
// .filter(c -> c.pearClasspath != null)
// .collect(Collectors.groupingBy(c -> c.pearClasspath));
//
// List<Entry<String, List<ClassnameAndPath>>> ea = p2c.entrySet().stream()
// .sorted(Comparator.comparing(Entry::getKey)) //(e1, e2) -> e1.getKey().compareTo(e2.getKey())
// .collect(Collectors.toList());
//
// for (Entry<String, List<ClassnameAndPath>> e : ea) {
// noError = noError && compileV3SourcesCommon(e.getValue(), "for Pear " + e.getKey(), e.getKey() );
// }
// return noError;
//}
//
///**
//* @return true if no errors
//*/
//private boolean compileV3NonPearSources() {
//
// List<ClassnameAndPath> cnps = c2ps.stream()
// .filter(c -> c.pearClasspath == null)
// .collect(Collectors.toList());
//
// return compileV3SourcesCommon(cnps, "(non PEAR)", null);
//}
///**
//* @param pathInPear a complete path to a class inside an (installed) pear
//* @return the part starting after the top node of the install dir
//*/
//private String getPath_in_v3_classes(String pathInPear) {
// return path2classname.get(pathInPear);
//}
//private boolean reportDuplicates() throws IOException {
//List<List<CommonConverted>> nonIdenticals = new ArrayList<>();
//List<CommonConverted> onlyIdenticals = new ArrayList<>();
//
//classname2multiSources.forEach(
// (classname, ccs) -> {
// if (ccs.size() > 1) {
// nonIdenticals.add(ccs);
// } else {
// CommonConverted cc = ccs.get(0);
// if (cc.containersAndV2Paths.size() > 1)
// onlyIdenticals.add(cc); // the same item in multiple containers and/or paths
// }
// }
// );
//
//if (nonIdenticals.size() == 0) {
// if (onlyIdenticals.size() == 0) {
// System.out.println("There were no duplicates found.");
// } else {
// // report identical duplicates
// try (BufferedWriter bw = Files.newBufferedWriter(makePath(outDirLog + "identical_duplicates.txt"), StandardOpenOption.CREATE)) {
// logPrintNl("Report of Identical duplicates:", bw);
// for (CommonConverted cc : onlyIdenticals) {
// int i = 0;
// logPrintNl("Class: " + cc.fqcn_slash, bw);
// for (ContainerAndPath cp : cc.containersAndV2Paths) {
// logPrintNl(" " + (++i) + " " + cp, bw);
// }
// logPrintNl("", bw);
// }
// }
// }
// return true;
//}
//
//// non-identicals, print out all of them
//try (BufferedWriter bw = Files.newBufferedWriter(makePath(outDirLog + "nonIdentical_duplicates.txt"), StandardOpenOption.CREATE)) {
// logPrintNl("Report of non-identical duplicates", bw);
// for (List<CommonConverted> nonIdentical : nonIdenticals) {
// String fqcn = nonIdentical.get(0).fqcn_slash;
// logPrintNl(" classname: " + fqcn, bw);
// int i = 1;
// // for each cc, and within each cc, for each containerAndPath
// for (CommonConverted cc : nonIdentical) {
//// logPrintNl(" version " + i, bw);
// assert fqcn.equals(cc.fqcn_slash);
// int j = 1;
// boolean isSame = cc.containersAndV2Paths.size() > 1;
// boolean isFirstTime = true;
// for (ContainerAndPath cp : cc.containersAndV2Paths) {
// String first = isSame && isFirstTime
// ? " same: "
// : isSame
// ? " "
// : " ";
// isFirstTime = false;
// logPrintNl(first + i + "." + (j++) + " " + cp, bw);
// }
// indent[0] -= 6;
//// logPrintNl("", bw);
// i++;
// }
//// logPrintNl("", bw);
// }
//}
//return false;
//}
//private static class PathAndReason extends Report2<Path, String> {
//Path path;
//String reason;
//PathAndReason(Path path, String reason) {
// this.path = path;
// this.reason = reason;
//}
//@Override
//public Comparable<Path> getFirst() { return path; }
//@Override
//public Comparable<String> getSecond() { return reason; }
//@Override
//int getFirstLength() { return path.toString().length(); }
//}
}
| [UIMA-6373] Format UIMA Core Java SDK codebase
- Auto-format
| uimaj-v3migration-jcas/src/main/java/org/apache/uima/migratev3/jcas/MigrateJCas.java | [UIMA-6373] Format UIMA Core Java SDK codebase |
|
Java | apache-2.0 | 38f0c3f00a79a74b2b7f1dad04209ed4c18ae81d | 0 | mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData | /*******************************************************************************
* Copyright 2015 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*******************************************************************************/
package org.mousephenotype.cda.indexers;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.mousephenotype.cda.constants.Constants;
import org.mousephenotype.cda.enumerations.BiologicalSampleType;
import org.mousephenotype.cda.enumerations.SexType;
import org.mousephenotype.cda.enumerations.ZygosityType;
import org.mousephenotype.cda.indexers.exceptions.IndexerException;
import org.mousephenotype.cda.indexers.utils.IndexerMap;
import org.mousephenotype.cda.solr.service.dto.ImpressBaseDTO;
import org.mousephenotype.cda.solr.service.dto.ObservationDTOWrite;
import org.mousephenotype.cda.solr.service.dto.ParameterDTO;
import org.mousephenotype.cda.utilities.CommonUtils;
import org.mousephenotype.cda.utilities.RunStatus;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import javax.sql.DataSource;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
/**
* Populate the experiment core
*/
public class ObservationIndexer extends AbstractIndexer {
public static final String DATETIME_FORMAT = "yyyy-MM-dd HH:mm:ss.S";
private final org.slf4j.Logger logger = LoggerFactory.getLogger(this.getClass());
private static Connection connection;
CommonUtils commonUtils = new CommonUtils();
@Autowired
@Qualifier("komp2DataSource")
DataSource komp2DataSource;
@Autowired
@Qualifier("observationIndexing")
SolrServer observationSolrServer;
Map<String, BiologicalDataBean> biologicalData = new HashMap<>();
Map<String, BiologicalDataBean> lineBiologicalData = new HashMap<>();
Map<Integer, ImpressBaseDTO> pipelineMap = new HashMap<>();
Map<Integer, ImpressBaseDTO> procedureMap = new HashMap<>();
Map<Integer, ParameterDTO> parameterMap = new HashMap<>();
Map<Integer, DatasourceBean> datasourceMap = new HashMap<>();
Map<Integer, DatasourceBean> projectMap = new HashMap<>();
Map<Integer, List<ParameterAssociationBean>> parameterAssociationMap = new HashMap<>();
Map<Integer, List<WeightBean>> weightMap = new HashMap<>();
Map<Integer, WeightBean> ipgttWeightMap = new HashMap<>();
Map<String, Map<String, String>> translateCategoryNames = new HashMap<>();
public static final String ipgttWeightParameter = "IMPC_IPG_001_001";
public static final List<String> maleFertilityParameters = Arrays.asList("IMPC_FER_001_001", "IMPC_FER_006_001", "IMPC_FER_007_001", "IMPC_FER_008_001", "IMPC_FER_009_001");
public static final List<String> femaleFertilityParameters = Arrays.asList("IMPC_FER_019_001", "IMPC_FER_010_001", "IMPC_FER_011_001", "IMPC_FER_012_001", "IMPC_FER_013_001");
public ObservationIndexer() {
}
@Override
public RunStatus validateBuild() throws IndexerException {
return super.validateBuild(observationSolrServer);
}
public static void main(String[] args) throws IndexerException {
RunStatus runStatus = new RunStatus();
ObservationIndexer main = new ObservationIndexer();
main.initialise(args, runStatus);
main.run();
main.validateBuild();
}
@Override
public void initialise(String[] args, RunStatus runStatus) throws IndexerException {
super.initialise(args, runStatus);
try {
connection = komp2DataSource.getConnection();
pipelineMap = IndexerMap.getImpressPipelines(connection);
procedureMap = IndexerMap.getImpressProcedures(connection);
parameterMap = IndexerMap.getImpressParameters(connection);
} catch (SQLException e) {
throw new IndexerException(e);
}
printConfiguration();
}
@Override
public RunStatus run() throws IndexerException {
long count = 0;
RunStatus runStatus = new RunStatus();
long start = System.currentTimeMillis();
try {
populateDatasourceDataMap();
populateCategoryNamesDataMap();
populateBiologicalDataMap();
populateLineBiologicalDataMap();
populateParameterAssociationMap();
populateWeightMap();
populateIpgttWeightMap();
count = populateObservationSolrCore(runStatus);
} catch (SolrServerException | SQLException | IOException e) {
throw new IndexerException(e);
}
logger.info(" Added {} total beans in {}", count, commonUtils.msToHms(System.currentTimeMillis() - start));
return runStatus;
}
public long populateObservationSolrCore(RunStatus runStatus) throws SQLException, IOException, SolrServerException {
int count = 0;
observationSolrServer.deleteByQuery("*:*");
String query = "SELECT o.id as id, o.db_id as datasource_id, o.parameter_id as parameter_id, o.parameter_stable_id,\n"
+ "o.observation_type, o.missing, o.parameter_status, o.parameter_status_message,\n"
+ "o.biological_sample_id,\n"
+ "e.project_id as project_id, e.pipeline_id as pipeline_id, e.procedure_id as procedure_id,\n"
+ "e.date_of_experiment, e.external_id, e.id as experiment_id,\n"
+ "e.metadata_combined as metadata_combined, e.metadata_group as metadata_group,\n"
+ "co.category as raw_category,\n"
+ "uo.data_point as unidimensional_data_point,\n"
+ "mo.data_point as multidimensional_data_point,\n"
+ "tso.data_point as time_series_data_point,\n"
+ "tro.text as text_value,\n"
// + "ontE.term_value,\n"
+ "mo.order_index,\n"
+ "mo.dimension,\n"
+ "tso.time_point,\n"
+ "tso.discrete_point,\n"
+ "iro.file_type,\n"
+ "iro.download_file_path\n"
+ "FROM observation o\n"
+ "LEFT OUTER JOIN categorical_observation co ON o.id=co.id\n"
+ "LEFT OUTER JOIN unidimensional_observation uo ON o.id=uo.id\n"
+ "LEFT OUTER JOIN multidimensional_observation mo ON o.id=mo.id\n"
+ "LEFT OUTER JOIN time_series_observation tso ON o.id=tso.id\n"
+ "LEFT OUTER JOIN image_record_observation iro ON o.id=iro.id\n"
+ "LEFT OUTER JOIN text_observation tro ON o.id=tro.id\n"
// +" LEFT OUTER JOIN ontology_entity ontE ON o.id=ontE.ontology_observation_id\n"
+ "INNER JOIN experiment_observation eo ON eo.observation_id=o.id\n"
+ "INNER JOIN experiment e on eo.experiment_id=e.id\n"
+ "WHERE o.missing=0";
try (PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {
p.setFetchSize(Integer.MIN_VALUE);
ResultSet r = p.executeQuery();
while (r.next()) {
ObservationDTOWrite o = new ObservationDTOWrite();
o.setId(r.getInt("id"));
o.setParameterId(r.getInt("parameter_id"));
o.setExperimentId(r.getInt("experiment_id"));
o.setExperimentSourceId(r.getString("external_id"));
ZonedDateTime dateOfExperiment = null;
try {
dateOfExperiment = ZonedDateTime.parse(r.getString("date_of_experiment"), DateTimeFormatter.ofPattern(DATETIME_FORMAT).withZone(ZoneId.of("UTC")));
o.setDateOfExperiment(dateOfExperiment);
} catch (NullPointerException e) {
logger.debug("No date of experiment set for experiment external ID: {}", r.getString("external_id"));
o.setDateOfExperiment(null);
}
o.setParameterId(parameterMap.get(r.getInt("parameter_id")).getId());
o.setParameterName(parameterMap.get(r.getInt("parameter_id")).getName());
o.setParameterStableId(parameterMap.get(r.getInt("parameter_id")).getStableId());
o.setProcedureId(procedureMap.get(r.getInt("procedure_id")).getId());
o.setProcedureName(procedureMap.get(r.getInt("procedure_id")).getName());
String procedureStableId = procedureMap.get(r.getInt("procedure_id")).getStableId();
o.setProcedureStableId(procedureStableId);
o.setProcedureGroup(procedureStableId.substring(0, procedureStableId.lastIndexOf("_")));
o.setPipelineId(pipelineMap.get(r.getInt("pipeline_id")).getId());
o.setPipelineName(pipelineMap.get(r.getInt("pipeline_id")).getName());
o.setPipelineStableId(pipelineMap.get(r.getInt("pipeline_id")).getStableId());
o.setDataSourceId(datasourceMap.get(r.getInt("datasource_id")).id);
o.setDataSourceName(datasourceMap.get(r.getInt("datasource_id")).name);
o.setProjectId(projectMap.get(r.getInt("project_id")).id);
o.setProjectName(projectMap.get(r.getInt("project_id")).name);
o.setMetadataGroup(r.getString("metadata_group"));
if (r.wasNull()) {
o.setMetadataGroup("");
o.setMetadata(new ArrayList<>());
}
String metadataCombined = r.getString("metadata_combined");
if (!r.wasNull()) {
o.setMetadata(new ArrayList<>(Arrays.asList(metadataCombined.split("::"))));
}
// Add the Biological data
String bioSampleId = r.getString("biological_sample_id");
if (r.wasNull()) {
// Line level data
BiologicalDataBean b = lineBiologicalData.get(r.getString("experiment_id"));
if (b == null) {
runStatus.addError(" Cannot find biological data for experiment " + r.getString("experiment_id"));
continue;
}
o.setBiologicalModelId(b.biologicalModelId);
o.setGeneAccession(b.geneAcc);
o.setGeneSymbol(b.geneSymbol);
o.setAlleleAccession(b.alleleAccession);
o.setAlleleSymbol(b.alleleSymbol);
o.setStrainAccessionId(b.strainAcc);
o.setStrainName(b.strainName);
o.setGeneticBackground(b.geneticBackground);
o.setPhenotypingCenter(b.phenotypingCenterName);
o.setPhenotypingCenterId(b.phenotypingCenterId);
o.setColonyId(b.colonyId);
// Viability applies to both sexes
if (o.getParameterStableId().contains("_VIA_")) {
o.setSex(SexType.both.getName());
} else {
                        // Fertility applies to the sex tested; separate parameters exist for male / female
if (maleFertilityParameters.contains(o.getParameterStableId())) {
o.setSex(SexType.male.getName());
} else if (femaleFertilityParameters.contains(o.getParameterStableId())) {
o.setSex(SexType.female.getName());
}
if (o.getSex() == null) {
o.setSex(SexType.both.getName());
}
}
if (b.zygosity != null) {
o.setZygosity(b.zygosity);
} else {
// Default to hom
o.setZygosity(ZygosityType.homozygote.getName());
}
// All line level parameters are sample group "experimental" due to the nature of the
// procedures (i.e. no control mice will go through VIA or FER procedures.)
o.setGroup(BiologicalSampleType.experimental.getName());
} else {
// Specimen level data
BiologicalDataBean b = biologicalData.get(bioSampleId);
o.setBiologicalModelId(b.biologicalModelId);
o.setGeneAccession(b.geneAcc);
o.setGeneSymbol(b.geneSymbol);
o.setAlleleAccession(b.alleleAccession);
o.setAlleleSymbol(b.alleleSymbol);
o.setStrainAccessionId(b.strainAcc);
o.setStrainName(b.strainName);
o.setGeneticBackground(b.geneticBackground);
o.setPhenotypingCenter(b.phenotypingCenterName);
o.setPhenotypingCenterId(b.phenotypingCenterId);
o.setColonyId(b.colonyId);
o.setZygosity(b.zygosity);
o.setDateOfBirth(b.dateOfBirth);
o.setSex(b.sex);
o.setGroup(b.sampleGroup);
o.setBiologicalSampleId(b.biologicalSampleId);
o.setExternalSampleId(b.externalSampleId);
o.setDevelopmentStageAcc(b.developmentalStageAcc);
o.setDevelopmentStageName(b.developmentalStageName);
if (b.productionCenterName!=null){
o.setProductionCenter(b.productionCenterName);
}
if (b.productionCenterId!=null){
o.setProductionCenterId(b.productionCenterId);
}
if (b.litterId!=null){
o.setLitterId(b.litterId);
}
}
o.setObservationType(r.getString("observation_type"));
String cat = r.getString("raw_category");
if (!r.wasNull()) {
String param = r.getString("parameter_stable_id");
if (translateCategoryNames.containsKey(param)) {
String transCat = translateCategoryNames.get(param).get(cat);
//System.out.println("param with cat is="+param+" cat="+cat);
//System.out.println("transCat="+transCat);
if (transCat != null && !transCat.equals("")) {
o.setCategory(transCat);
} else {
o.setCategory(cat);
}
} else {
o.setCategory(cat);
}
}
// Add the correct "data point" for the type
switch (r.getString("observation_type")) {
case "unidimensional":
o.setDataPoint(r.getFloat("unidimensional_data_point"));
break;
case "multidimensional":
o.setDataPoint(r.getFloat("multidimensional_data_point"));
break;
case "time_series":
o.setDataPoint(r.getFloat("time_series_data_point"));
break;
}
Integer order_index = r.getInt("order_index");
if (!r.wasNull()) {
o.setOrderIndex(order_index);
}
String dimension = r.getString("dimension");
if (!r.wasNull()) {
o.setDimension(dimension);
}
String time_point = r.getString("time_point");
if (!r.wasNull()) {
o.setTimePoint(time_point);
}
Float discrete_point = r.getFloat("discrete_point");
if (!r.wasNull()) {
o.setDiscretePoint(discrete_point);
}
String text_value = r.getString("text_value");
if (!r.wasNull()) {
o.setTextValue(text_value);
}
// String term_value = r.getString("term_value");
// if (!r.wasNull()) {
// o.setTermValue(term_value);
// }
String file_type = r.getString("file_type");
if (!r.wasNull()) {
o.setFileType(file_type);
}
String download_file_path = r.getString("download_file_path");
if (!r.wasNull()) {
o.setDownloadFilePath(download_file_path);
}
if (parameterAssociationMap.containsKey(r.getInt("id"))) {
for (ParameterAssociationBean pb : parameterAssociationMap.get(r.getInt("id"))) {
// Will never be null, we hope
o.addParameterAssociationStableId(pb.parameterStableId);
o.addParameterAssociationName(pb.parameterAssociationName);
if (StringUtils.isNotEmpty(pb.parameterAssociationValue)) {
o.addParameterAssociationValue(pb.parameterAssociationValue);
}
if (StringUtils.isNotEmpty(pb.sequenceId)) {
o.addParameterAssociationSequenceId(pb.sequenceId);
}
if (StringUtils.isNotEmpty(pb.dimId)) {
o.addParameterAssociationDimId(pb.dimId);
}
}
}
// Add weight parameters only if this observation isn't for a weight parameter
if (!Constants.weightParameters.contains(o.getParameterStableId()) && !ipgttWeightParameter.equals(o.getParameterStableId())) {
WeightBean b = getNearestWeight(o.getBiologicalSampleId(), dateOfExperiment);
if (o.getProcedureGroup().contains("_IPG")) {
b = getNearestIpgttWeight(o.getBiologicalSampleId());
}
if (b != null) {
o.setWeight(b.weight);
o.setWeightDate(b.date);
o.setWeightDaysOld(b.daysOld);
o.setWeightParameterStableId(b.parameterStableId);
}
}
// 60 seconds between commits
documentCount++;
observationSolrServer.addBean(o, 60000);
count++;
if (count % 2000000 == 0) {
logger.info(" Added " + count + " beans");
}
}
// Final commit to save the rest of the docs
observationSolrServer.commit();
} catch (Exception e) {
e.printStackTrace();
System.out.println(" Big error :" + e.getMessage());
}
return count;
}
/**
     * Add all the relevant data required for quickly looking up biological data associated with a biological sample
*
* @throws SQLException when a database exception occurs
*/
public void populateBiologicalDataMap() throws SQLException {
String featureFlagMeansQuery = "SELECT column_name FROM information_schema.COLUMNS WHERE TABLE_NAME='biological_sample' AND TABLE_SCHEMA=(SELECT database())";
Set<String> featureFlags = new HashSet<>();
try (PreparedStatement p = connection.prepareStatement(featureFlagMeansQuery)) {
ResultSet r = p.executeQuery();
while (r.next()) {
featureFlags.add(r.getString("column_name"));
}
}
String query = "SELECT CAST(bs.id AS CHAR) as biological_sample_id, bs.organisation_id as phenotyping_center_id, "
+ "org.name as phenotyping_center_name, bs.sample_group, bs.external_id as external_sample_id, "
+ "ls.date_of_birth, ls.colony_id, ls.sex as sex, ls.zygosity, ls.developmental_stage_acc, ot.name AS developmental_stage_name, ot.acc as developmental_stage_acc,"
+ "bms.biological_model_id, "
+ "strain.acc as strain_acc, strain.name as strain_name, bm.genetic_background, "
+ "(select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=bms.biological_model_id) as allele_accession, "
+ "(select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bms.biological_model_id) as allele_symbol, "
+ "(select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bms.biological_model_id) as acc, "
+ "(select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bms.biological_model_id) as symbol ";
if (featureFlags.contains("production_center_id") && featureFlags.contains("litter_id")) {
query += ", bs.production_center_id, prod_org.name as production_center_name, bs.litter_id ";
}
query += "FROM biological_sample bs "
+ "INNER JOIN organisation org ON bs.organisation_id=org.id "
+ "INNER JOIN live_sample ls ON bs.id=ls.id "
+ "INNER JOIN biological_model_sample bms ON bs.id=bms.biological_sample_id "
+ "INNER JOIN biological_model_strain bmstrain ON bmstrain.biological_model_id=bms.biological_model_id "
+ "INNER JOIN strain strain ON strain.acc=bmstrain.strain_acc "
+ "INNER JOIN biological_model bm ON bm.id = bms.biological_model_id "
+ "INNER JOIN ontology_term ot ON ot.acc=ls.developmental_stage_acc ";
if (featureFlags.contains("production_center_id") && featureFlags.contains("litter_id")) {
query += "INNER JOIN organisation prod_org ON bs.organisation_id=prod_org.id ";
}
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
BiologicalDataBean b = new BiologicalDataBean();
b.alleleAccession = resultSet.getString("allele_accession");
b.alleleSymbol = resultSet.getString("allele_symbol");
b.biologicalModelId = resultSet.getInt("biological_model_id");
b.biologicalSampleId = resultSet.getInt("biological_sample_id");
b.colonyId = resultSet.getString("colony_id");
try {
b.dateOfBirth = ZonedDateTime.parse(resultSet.getString("date_of_birth"), DateTimeFormatter.ofPattern(DATETIME_FORMAT).withZone(ZoneId.of("UTC")));
} catch (NullPointerException e) {
b.dateOfBirth = null;
logger.debug("No date of birth set for specimen external ID: {}", resultSet.getString("external_sample_id"));
}
b.externalSampleId = resultSet.getString("external_sample_id");
b.geneAcc = resultSet.getString("acc");
b.geneSymbol = resultSet.getString("symbol");
b.phenotypingCenterId = resultSet.getInt("phenotyping_center_id");
b.phenotypingCenterName = resultSet.getString("phenotyping_center_name");
b.sampleGroup = resultSet.getString("sample_group");
b.sex = resultSet.getString("sex");
b.strainAcc = resultSet.getString("strain_acc");
b.strainName = resultSet.getString("strain_name");
b.geneticBackground = resultSet.getString("genetic_background");
b.zygosity = resultSet.getString("zygosity");
b.developmentalStageAcc = resultSet.getString("developmental_stage_acc");
b.developmentalStageName = resultSet.getString("developmental_stage_name");
if (featureFlags.contains("production_center_id") && featureFlags.contains("litter_id")) {
b.productionCenterId = resultSet.getInt("production_center_id");
b.productionCenterName = resultSet.getString("production_center_name");
b.litterId = resultSet.getString("litter_id");
}
biologicalData.put(resultSet.getString("biological_sample_id"), b);
}
}
}
/**
     * Add all the relevant data required for quickly looking up biological data associated with a biological model (really an
* experiment)
*
* @throws SQLException when a database exception occurs
*/
public void populateLineBiologicalDataMap() throws SQLException {
String query = "SELECT e.id as experiment_id, e.colony_id, e.biological_model_id, "
+ "e.organisation_id as phenotyping_center_id, org.name as phenotyping_center_name, "
+ "strain.acc as strain_acc, strain.name as strain_name, bm.genetic_background, "
+ "(select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=e.biological_model_id) as allele_accession, "
+ "(select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=e.biological_model_id) as allele_symbol, "
+ "(select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=e.biological_model_id) as acc, "
+ "(select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=e.biological_model_id) as symbol "
+ "FROM experiment e "
+ "INNER JOIN organisation org ON e.organisation_id=org.id "
+ "INNER JOIN biological_model_strain bm_strain ON bm_strain.biological_model_id=e.biological_model_id "
+ "INNER JOIN strain strain ON strain.acc=bm_strain.strain_acc "
+ "INNER JOIN biological_model bm ON bm.id = e.biological_model_id";
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
BiologicalDataBean b = new BiologicalDataBean();
b.colonyId = resultSet.getString("colony_id");
b.phenotypingCenterId = resultSet.getInt("phenotyping_center_id");
b.phenotypingCenterName = resultSet.getString("phenotyping_center_name");
b.strainAcc = resultSet.getString("strain_acc");
b.strainName = resultSet.getString("strain_name");
b.geneticBackground = resultSet.getString("genetic_background");
b.alleleAccession = resultSet.getString("allele_accession");
b.alleleSymbol = resultSet.getString("allele_symbol");
b.biologicalModelId = resultSet.getInt("biological_model_id");
b.geneAcc = resultSet.getString("acc");
b.geneSymbol = resultSet.getString("symbol");
if (b.alleleAccession == null && b.colonyId != null) {
// Override the biological model with one that has the
// correct gene/allele/strain
String query2 = "SELECT DISTINCT bm.id as biological_model_id, " +
" (select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=bm.id) as allele_accession, " +
" (select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bm.id) as allele_symbol, " +
" (select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bm.id) as acc, " +
" (select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bm.id) as symbol, " +
" strain.acc as strain_acc, strain.name as strain_name, bm.genetic_background " +
" FROM live_sample ls " +
" INNER JOIN biological_model_sample bms ON bms.biological_sample_id=ls.id " +
" INNER JOIN biological_model bm ON bm.id=bms.biological_model_id " +
" INNER JOIN biological_model_strain bm_strain ON bm_strain.biological_model_id=bm.id " +
" INNER JOIN strain strain ON strain.acc=bm_strain.strain_acc " +
" WHERE bm.allelic_composition !='' AND ls.colony_id = ? LIMIT 1 " ;
try (PreparedStatement p2 = connection.prepareStatement(query2)) {
p2.setString(1, resultSet.getString("colony_id"));
ResultSet resultSet2 = p2.executeQuery();
resultSet2.next();
b.strainAcc = resultSet2.getString("strain_acc");
b.strainName = resultSet2.getString("strain_name");
b.geneticBackground = resultSet2.getString("genetic_background");
b.alleleAccession = resultSet2.getString("allele_accession");
b.alleleSymbol = resultSet2.getString("allele_symbol");
b.biologicalModelId = resultSet2.getInt("biological_model_id");
b.geneAcc = resultSet2.getString("acc");
b.geneSymbol = resultSet2.getString("symbol");
}
}
lineBiologicalData.put(resultSet.getString("experiment_id"), b);
}
}
}
/**
* Add all the relevant data required for translating the category names in the cases where the category names are
* numerals, but the actual name is in the description field
*
* @throws SQLException when a database exception occurs
*/
public void populateCategoryNamesDataMap() throws SQLException {
String query = "SELECT pp.stable_id, ppo.name, ppo.description FROM phenotype_parameter pp "
+ "INNER JOIN phenotype_parameter_lnk_option pplo ON pp.id=pplo.parameter_id "
+ "INNER JOIN phenotype_parameter_option ppo ON ppo.id=pplo.option_id "
+ "WHERE ppo.name NOT REGEXP '^[a-zA-Z]' AND ppo.description!='' ";
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
String stableId = resultSet.getString("stable_id");
logger.debug(" parameter_stable_id for numeric category: {}", stableId);
if (!translateCategoryNames.containsKey(stableId)) {
translateCategoryNames.put(stableId, new HashMap<>());
}
String name = resultSet.getString("name");
String description = resultSet.getString("description");
if (name.matches("[0-9]+")) {
                    // add .0 onto the string as this is what our numerical categories look like in solr!!!!
name += ".0";
translateCategoryNames.get(stableId).put(name, description);
} else {
logger.debug(" Not translating non alphabetical category for parameter: " + stableId + ", name: " + name + ", desc:" + description);
}
}
}
}
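    // Illustrative sketch (not part of the original source): if an IMPReSS option for a parameter is
    // stored with name "1" and description "Present", the map built above contains the entry
    // "1.0" -> "Present", so a raw category of "1.0" is shown as "Present" when observation documents
    // are built. The values "1" and "Present" are made up for illustration.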
public void populateParameterAssociationMap() throws SQLException {
Map<String, String> stableIdToNameMap = this.getAllParameters();
String query = "SELECT id, observation_id, parameter_id, sequence_id, dim_id, parameter_association_value FROM parameter_association";
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
Integer obsId = resultSet.getInt("observation_id");
ParameterAssociationBean pb = new ParameterAssociationBean();
pb.observationId = obsId;
pb.parameterStableId = resultSet.getString("parameter_id");
pb.parameterAssociationValue = resultSet.getString("parameter_association_value");
if (stableIdToNameMap.get(pb.parameterStableId) != null) {
pb.parameterAssociationName = stableIdToNameMap.get(pb.parameterStableId);
}
pb.sequenceId = resultSet.getString("sequence_id");
pb.dimId = resultSet.getString("dim_id");
if (!parameterAssociationMap.containsKey(obsId)) {
parameterAssociationMap.put(obsId, new ArrayList<>());
}
parameterAssociationMap.get(obsId).add(pb);
}
}
}
/**
* Return all parameter stable ids and names
*
* @throws SQLException When a database error occurs
*/
public Map<String, String> getAllParameters() throws SQLException {
Map<String, String> parameters = new HashMap<>();
String query = "SELECT stable_id, name FROM komp2.phenotype_parameter";
try (PreparedStatement statement = getConnection().prepareStatement(query)) {
ResultSet resultSet = statement.executeQuery();
while (resultSet.next()) {
parameters.put(resultSet.getString("stable_id"), resultSet.getString("name"));
}
}
return parameters;
}
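/**
* Populate the datasource and project lookup maps, keyed by database ID
*
* @throws SQLException when a database exception occurs
*/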
public void populateDatasourceDataMap() throws SQLException {
List<String> queries = new ArrayList<>();
queries.add("SELECT id, short_name as name, 'DATASOURCE' as datasource_type FROM external_db");
queries.add("SELECT id, name, 'PROJECT' as datasource_type FROM project");
for (String query : queries) {
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
DatasourceBean b = new DatasourceBean();
b.id = resultSet.getInt("id");
b.name = resultSet.getString("name");
switch (resultSet.getString("datasource_type")) {
case "DATASOURCE":
datasourceMap.put(resultSet.getInt("id"), b);
break;
case "PROJECT":
projectMap.put(resultSet.getInt("id"), b);
break;
}
}
}
}
}
/**
* Compare all weight dates to select the nearest to the date of experiment
*
* @param specimenID the specimen
* @param dateOfExperiment the date
* @return the nearest weight bean to the date of the experiment
*/
public WeightBean getNearestWeight(Integer specimenID, ZonedDateTime dateOfExperiment) {
WeightBean nearest = null;
if (dateOfExperiment != null && weightMap.containsKey(specimenID)) {
for (WeightBean candidate : weightMap.get(specimenID)) {
if (nearest == null) {
nearest = candidate;
continue;
}
if (Math.abs(dateOfExperiment.toInstant().toEpochMilli() - candidate.date.toInstant().toEpochMilli()) < Math.abs(dateOfExperiment.toInstant().toEpochMilli() - nearest.date.toInstant().toEpochMilli())) {
nearest = candidate;
}
}
}
// Do not return weight that is > 4 days away from the experiment
// since the weight of the specimen becomes less and less relevant
// (Heuristic from Natasha Karp @ WTSI)
// 4 days = 345,600,000 ms
if (nearest != null && Math.abs(dateOfExperiment.toInstant().toEpochMilli() - nearest.date.toInstant().toEpochMilli()) > 3.456E8) {
nearest = null;
}
return nearest;
}
/**
* Select the weight recorded as part of the IPGTT procedure for a specimen
*
* @param specimenID the specimen
* @return the IPGTT weight bean for the specimen, or null if none was recorded
*/
public WeightBean getNearestIpgttWeight(Integer specimenID) {
WeightBean nearest = null;
if (ipgttWeightMap.containsKey(specimenID)) {
nearest = ipgttWeightMap.get(specimenID);
}
return nearest;
}
/**
* Return map of specimen ID => List of all weights ordered by date ASC
*
* @exception SQLException When a database error occurs
*/
public void populateWeightMap() throws SQLException {
int count=0;
String query = "SELECT\n" +
" o.biological_sample_id, \n" +
" data_point AS weight, \n" +
" parameter_stable_id, \n" +
" date_of_experiment, \n" +
" datediff(date_of_experiment, ls.date_of_birth) as days_old, \n" +
" e.organisation_id \n" +
"FROM observation o \n" +
" INNER JOIN unidimensional_observation uo ON uo.id = o.id \n" +
" INNER JOIN live_sample ls ON ls.id=o.biological_sample_id \n" +
" INNER JOIN experiment_observation eo ON o.id = eo.observation_id \n" +
" INNER JOIN experiment e ON e.id = eo.experiment_id \n" +
"WHERE parameter_stable_id IN ("+StringUtils.join(Constants.weightParameters, ",")+") AND data_point > 0" +
" ORDER BY biological_sample_id, date_of_experiment ASC \n" ;
try (PreparedStatement statement = getConnection().prepareStatement(query)) {
ResultSet resultSet = statement.executeQuery();
while (resultSet.next()) {
WeightBean b = new WeightBean();
try {
b.date = ZonedDateTime.parse(resultSet.getString("date_of_experiment"), DateTimeFormatter.ofPattern(DATETIME_FORMAT).withZone(ZoneId.of("UTC")));
} catch (NullPointerException e) {
b.date = null;
logger.debug("No date of experiment set for sample id {} parameter {}", resultSet.getString("biological_sample_id"), resultSet.getString("parameter_stable_id"));
}
b.weight = resultSet.getFloat("weight");
b.parameterStableId = resultSet.getString("parameter_stable_id");
b.daysOld = resultSet.getInt("days_old");
final Integer specimenId = resultSet.getInt("biological_sample_id");
if (!weightMap.containsKey(specimenId)) {
weightMap.put(specimenId, new ArrayList<>());
}
weightMap.get(specimenId).add(b);
count += 1;
}
}
logger.info(" Added {} specimen weight data map entries", count, weightMap.size());
}
/**
* Populate the map of specimen ID => weight recorded for the IPGTT procedure
*
* @exception SQLException When a database error occurs
*/
public void populateIpgttWeightMap() throws SQLException {
String query = "SELECT o.biological_sample_id, data_point AS weight, parameter_stable_id, " +
"date_of_experiment, DATEDIFF(date_of_experiment, ls.date_of_birth) AS days_old " +
"FROM observation o " +
" INNER JOIN unidimensional_observation uo ON uo.id = o.id " +
" INNER JOIN live_sample ls ON ls.id=o.biological_sample_id " +
" INNER JOIN experiment_observation eo ON o.id = eo.observation_id " +
" INNER JOIN experiment e ON e.id = eo.experiment_id " +
"WHERE parameter_stable_id = '"+ ipgttWeightParameter +"' " ;
try (PreparedStatement statement = getConnection().prepareStatement(query)) {
ResultSet resultSet = statement.executeQuery();
while (resultSet.next()) {
WeightBean b = new WeightBean();
try {
b.date = ZonedDateTime.parse(resultSet.getString("date_of_experiment"), DateTimeFormatter.ofPattern(DATETIME_FORMAT).withZone(ZoneId.of("UTC")));
} catch (NullPointerException e) {
b.date = null;
logger.debug("No date of experiment set for sample id {} parameter {}", resultSet.getString("biological_sample_id"), resultSet.getString("parameter_stable_id"));
}
b.weight = resultSet.getFloat("weight");
b.parameterStableId = resultSet.getString("parameter_stable_id");
b.daysOld = resultSet.getInt("days_old");
final Integer specimenId = resultSet.getInt("biological_sample_id");
ipgttWeightMap.put(specimenId, b);
}
}
}
public static Connection getConnection() {
return connection;
}
public Map<String, Map<String, String>> getTranslateCategoryNames() {
return translateCategoryNames;
}
public Map<String, BiologicalDataBean> getLineBiologicalData() {
return lineBiologicalData;
}
public Map<String, BiologicalDataBean> getBiologicalData() {
return biologicalData;
}
public Map<Integer, DatasourceBean> getDatasourceMap() {
return datasourceMap;
}
public Map<Integer, DatasourceBean> getProjectMap() {
return projectMap;
}
public Map<Integer, List<WeightBean>> getWeightMap() {
return weightMap;
}
/**
* Internal class to act as Map value DTO for biological data
*/
protected class BiologicalDataBean {
public String alleleAccession;
public String alleleSymbol;
public Integer biologicalModelId;
public Integer biologicalSampleId;
public String colonyId;
public ZonedDateTime dateOfBirth;
public String externalSampleId;
public String geneAcc;
public String geneSymbol;
public String phenotypingCenterName;
public Integer phenotypingCenterId;
public String sampleGroup;
public String sex;
public String strainAcc;
public String strainName;
public String geneticBackground;
public String zygosity;
public String developmentalStageAcc;
public String developmentalStageName;
public String productionCenterName;
public Integer productionCenterId;
public String litterId;
}
/**
* Internal class to act as Map value DTO for weight data
*/
protected class WeightBean {
public String parameterStableId;
public ZonedDateTime date;
public Float weight;
public Integer daysOld;
@Override
public String toString() {
return "WeightBean{" +
"parameterStableId='" + parameterStableId + '\'' +
", date=" + date +
", weight=" + weight +
", daysOld=" + daysOld +
'}';
}
}
/**
* Internal class to act as Map value DTO for datasource data
*/
protected class DatasourceBean {
public Integer id;
public String name;
}
/**
* Internal class to act as Map value DTO for datasource data
*/
protected class ParameterAssociationBean {
public String parameterAssociationName;
public String parameterAssociationValue;
public Integer id;
public Integer observationId;
public String parameterStableId;
public String sequenceId;
public String dimId;
}
}
| indexers/src/main/java/org/mousephenotype/cda/indexers/ObservationIndexer.java | /*******************************************************************************
* Copyright 2015 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*******************************************************************************/
package org.mousephenotype.cda.indexers;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.mousephenotype.cda.constants.Constants;
import org.mousephenotype.cda.enumerations.BiologicalSampleType;
import org.mousephenotype.cda.enumerations.SexType;
import org.mousephenotype.cda.enumerations.ZygosityType;
import org.mousephenotype.cda.indexers.exceptions.IndexerException;
import org.mousephenotype.cda.indexers.utils.IndexerMap;
import org.mousephenotype.cda.solr.service.dto.ImpressBaseDTO;
import org.mousephenotype.cda.solr.service.dto.ObservationDTOWrite;
import org.mousephenotype.cda.solr.service.dto.ParameterDTO;
import org.mousephenotype.cda.utilities.CommonUtils;
import org.mousephenotype.cda.utilities.RunStatus;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import javax.sql.DataSource;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
/**
* Populate the experiment core
*/
public class ObservationIndexer extends AbstractIndexer {
public static final String DATETIME_FORMAT = "yyyy-MM-dd HH:mm:ss.S";
private final org.slf4j.Logger logger = LoggerFactory.getLogger(this.getClass());
private static Connection connection;
CommonUtils commonUtils = new CommonUtils();
@Autowired
@Qualifier("komp2DataSource")
DataSource komp2DataSource;
@Autowired
@Qualifier("observationIndexing")
SolrServer observationSolrServer;
Map<String, BiologicalDataBean> biologicalData = new HashMap<>();
Map<String, BiologicalDataBean> lineBiologicalData = new HashMap<>();
Map<Integer, ImpressBaseDTO> pipelineMap = new HashMap<>();
Map<Integer, ImpressBaseDTO> procedureMap = new HashMap<>();
Map<Integer, ParameterDTO> parameterMap = new HashMap<>();
Map<Integer, DatasourceBean> datasourceMap = new HashMap<>();
Map<Integer, DatasourceBean> projectMap = new HashMap<>();
Map<Integer, List<ParameterAssociationBean>> parameterAssociationMap = new HashMap<>();
Map<Integer, List<WeightBean>> weightMap = new HashMap<>();
Map<Integer, WeightBean> ipgttWeightMap = new HashMap<>();
Map<String, Map<String, String>> translateCategoryNames = new HashMap<>();
public static final String ipgttWeightParameter = "IMPC_IPG_001_001";
public static final List<String> maleFertilityParameters = Arrays.asList("IMPC_FER_001_001", "IMPC_FER_006_001", "IMPC_FER_007_001", "IMPC_FER_008_001", "IMPC_FER_009_001");
public static final List<String> femaleFertilityParameters = Arrays.asList("IMPC_FER_019_001", "IMPC_FER_010_001", "IMPC_FER_011_001", "IMPC_FER_012_001", "IMPC_FER_013_001");
public ObservationIndexer() {
}
@Override
public RunStatus validateBuild() throws IndexerException {
return super.validateBuild(observationSolrServer);
}
public static void main(String[] args) throws IndexerException {
RunStatus runStatus = new RunStatus();
ObservationIndexer main = new ObservationIndexer();
main.initialise(args, runStatus);
main.run();
main.validateBuild();
}
@Override
public void initialise(String[] args, RunStatus runStatus) throws IndexerException {
super.initialise(args, runStatus);
try {
connection = komp2DataSource.getConnection();
pipelineMap = IndexerMap.getImpressPipelines(connection);
procedureMap = IndexerMap.getImpressProcedures(connection);
parameterMap = IndexerMap.getImpressParameters(connection);
} catch (SQLException e) {
throw new IndexerException(e);
}
printConfiguration();
}
@Override
public RunStatus run() throws IndexerException {
long count = 0;
RunStatus runStatus = new RunStatus();
long start = System.currentTimeMillis();
try {
populateDatasourceDataMap();
populateCategoryNamesDataMap();
populateBiologicalDataMap();
populateLineBiologicalDataMap();
populateParameterAssociationMap();
populateWeightMap();
populateIpgttWeightMap();
count = populateObservationSolrCore(runStatus);
} catch (SolrServerException | SQLException | IOException e) {
throw new IndexerException(e);
}
logger.info(" Added {} total beans in {}", count, commonUtils.msToHms(System.currentTimeMillis() - start));
return runStatus;
}
public long populateObservationSolrCore(RunStatus runStatus) throws SQLException, IOException, SolrServerException {
int count = 0;
observationSolrServer.deleteByQuery("*:*");
String query = "SELECT o.id as id, o.db_id as datasource_id, o.parameter_id as parameter_id, o.parameter_stable_id,\n"
+ "o.observation_type, o.missing, o.parameter_status, o.parameter_status_message,\n"
+ "o.biological_sample_id,\n"
+ "e.project_id as project_id, e.pipeline_id as pipeline_id, e.procedure_id as procedure_id,\n"
+ "e.date_of_experiment, e.external_id, e.id as experiment_id,\n"
+ "e.metadata_combined as metadata_combined, e.metadata_group as metadata_group,\n"
+ "co.category as raw_category,\n"
+ "uo.data_point as unidimensional_data_point,\n"
+ "mo.data_point as multidimensional_data_point,\n"
+ "tso.data_point as time_series_data_point,\n"
+ "tro.text as text_value,\n"
+ "ontE.term_value,\n"
+ "mo.order_index,\n"
+ "mo.dimension,\n"
+ "tso.time_point,\n"
+ "tso.discrete_point,\n"
+ "iro.file_type,\n"
+ "iro.download_file_path\n"
+ "FROM observation o\n"
+ "LEFT OUTER JOIN categorical_observation co ON o.id=co.id\n"
+ "LEFT OUTER JOIN unidimensional_observation uo ON o.id=uo.id\n"
+ "LEFT OUTER JOIN multidimensional_observation mo ON o.id=mo.id\n"
+ "LEFT OUTER JOIN time_series_observation tso ON o.id=tso.id\n"
+ "LEFT OUTER JOIN image_record_observation iro ON o.id=iro.id\n"
+ "LEFT OUTER JOIN text_observation tro ON o.id=tro.id\n"
+" LEFT OUTER JOIN ontology_entity ontE ON o.id=ontE.ontology_observation_id\n"
+ "INNER JOIN experiment_observation eo ON eo.observation_id=o.id\n"
+ "INNER JOIN experiment e on eo.experiment_id=e.id\n"
+ "WHERE o.missing=0";
try (PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {
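// A fetch size of Integer.MIN_VALUE makes the MySQL JDBC driver stream rows one at a time
// instead of buffering the whole result set in memory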
p.setFetchSize(Integer.MIN_VALUE);
ResultSet r = p.executeQuery();
while (r.next()) {
ObservationDTOWrite o = new ObservationDTOWrite();
o.setId(r.getInt("id"));
o.setParameterId(r.getInt("parameter_id"));
o.setExperimentId(r.getInt("experiment_id"));
o.setExperimentSourceId(r.getString("external_id"));
ZonedDateTime dateOfExperiment = null;
try {
dateOfExperiment = ZonedDateTime.parse(r.getString("date_of_experiment"), DateTimeFormatter.ofPattern(DATETIME_FORMAT).withZone(ZoneId.of("UTC")));
o.setDateOfExperiment(dateOfExperiment);
} catch (NullPointerException e) {
logger.debug("No date of experiment set for experiment external ID: {}", r.getString("external_id"));
o.setDateOfExperiment(null);
}
o.setParameterId(parameterMap.get(r.getInt("parameter_id")).getId());
o.setParameterName(parameterMap.get(r.getInt("parameter_id")).getName());
o.setParameterStableId(parameterMap.get(r.getInt("parameter_id")).getStableId());
o.setProcedureId(procedureMap.get(r.getInt("procedure_id")).getId());
o.setProcedureName(procedureMap.get(r.getInt("procedure_id")).getName());
String procedureStableId = procedureMap.get(r.getInt("procedure_id")).getStableId();
o.setProcedureStableId(procedureStableId);
o.setProcedureGroup(procedureStableId.substring(0, procedureStableId.lastIndexOf("_")));
o.setPipelineId(pipelineMap.get(r.getInt("pipeline_id")).getId());
o.setPipelineName(pipelineMap.get(r.getInt("pipeline_id")).getName());
o.setPipelineStableId(pipelineMap.get(r.getInt("pipeline_id")).getStableId());
o.setDataSourceId(datasourceMap.get(r.getInt("datasource_id")).id);
o.setDataSourceName(datasourceMap.get(r.getInt("datasource_id")).name);
o.setProjectId(projectMap.get(r.getInt("project_id")).id);
o.setProjectName(projectMap.get(r.getInt("project_id")).name);
o.setMetadataGroup(r.getString("metadata_group"));
if (r.wasNull()) {
o.setMetadataGroup("");
o.setMetadata(new ArrayList<>());
}
String metadataCombined = r.getString("metadata_combined");
if (!r.wasNull()) {
o.setMetadata(new ArrayList<>(Arrays.asList(metadataCombined.split("::"))));
}
// Add the Biological data
String bioSampleId = r.getString("biological_sample_id");
if (r.wasNull()) {
// Line level data
BiologicalDataBean b = lineBiologicalData.get(r.getString("experiment_id"));
if (b == null) {
runStatus.addError(" Cannot find biological data for experiment " + r.getString("experiment_id"));
continue;
}
o.setBiologicalModelId(b.biologicalModelId);
o.setGeneAccession(b.geneAcc);
o.setGeneSymbol(b.geneSymbol);
o.setAlleleAccession(b.alleleAccession);
o.setAlleleSymbol(b.alleleSymbol);
o.setStrainAccessionId(b.strainAcc);
o.setStrainName(b.strainName);
o.setGeneticBackground(b.geneticBackground);
o.setPhenotypingCenter(b.phenotypingCenterName);
o.setPhenotypingCenterId(b.phenotypingCenterId);
o.setColonyId(b.colonyId);
// Viability applies to both sexes
if (o.getParameterStableId().contains("_VIA_")) {
o.setSex(SexType.both.getName());
} else {
// Fertility applies to the sex tested, separate parameters per male//female
if (maleFertilityParameters.contains(o.getParameterStableId())) {
o.setSex(SexType.male.getName());
} else if (femaleFertilityParameters.contains(o.getParameterStableId())) {
o.setSex(SexType.female.getName());
}
if (o.getSex() == null) {
o.setSex(SexType.both.getName());
}
}
if (b.zygosity != null) {
o.setZygosity(b.zygosity);
} else {
// Default to hom
o.setZygosity(ZygosityType.homozygote.getName());
}
// All line level parameters are sample group "experimental" due to the nature of the
// procedures (i.e. no control mice will go through VIA or FER procedures.)
o.setGroup(BiologicalSampleType.experimental.getName());
} else {
// Specimen level data
BiologicalDataBean b = biologicalData.get(bioSampleId);
o.setBiologicalModelId(b.biologicalModelId);
o.setGeneAccession(b.geneAcc);
o.setGeneSymbol(b.geneSymbol);
o.setAlleleAccession(b.alleleAccession);
o.setAlleleSymbol(b.alleleSymbol);
o.setStrainAccessionId(b.strainAcc);
o.setStrainName(b.strainName);
o.setGeneticBackground(b.geneticBackground);
o.setPhenotypingCenter(b.phenotypingCenterName);
o.setPhenotypingCenterId(b.phenotypingCenterId);
o.setColonyId(b.colonyId);
o.setZygosity(b.zygosity);
o.setDateOfBirth(b.dateOfBirth);
o.setSex(b.sex);
o.setGroup(b.sampleGroup);
o.setBiologicalSampleId(b.biologicalSampleId);
o.setExternalSampleId(b.externalSampleId);
o.setDevelopmentStageAcc(b.developmentalStageAcc);
o.setDevelopmentStageName(b.developmentalStageName);
if (b.productionCenterName!=null){
o.setProductionCenter(b.productionCenterName);
}
if (b.productionCenterId!=null){
o.setProductionCenterId(b.productionCenterId);
}
if (b.litterId!=null){
o.setLitterId(b.litterId);
}
}
o.setObservationType(r.getString("observation_type"));
String cat = r.getString("raw_category");
if (!r.wasNull()) {
String param = r.getString("parameter_stable_id");
if (translateCategoryNames.containsKey(param)) {
String transCat = translateCategoryNames.get(param).get(cat);
//System.out.println("param with cat is="+param+" cat="+cat);
//System.out.println("transCat="+transCat);
if (transCat != null && !transCat.equals("")) {
o.setCategory(transCat);
} else {
o.setCategory(cat);
}
} else {
o.setCategory(cat);
}
}
// Add the correct "data point" for the type
switch (r.getString("observation_type")) {
case "unidimensional":
o.setDataPoint(r.getFloat("unidimensional_data_point"));
break;
case "multidimensional":
o.setDataPoint(r.getFloat("multidimensional_data_point"));
break;
case "time_series":
o.setDataPoint(r.getFloat("time_series_data_point"));
break;
}
Integer order_index = r.getInt("order_index");
if (!r.wasNull()) {
o.setOrderIndex(order_index);
}
String dimension = r.getString("dimension");
if (!r.wasNull()) {
o.setDimension(dimension);
}
String time_point = r.getString("time_point");
if (!r.wasNull()) {
o.setTimePoint(time_point);
}
Float discrete_point = r.getFloat("discrete_point");
if (!r.wasNull()) {
o.setDiscretePoint(discrete_point);
}
String text_value = r.getString("text_value");
if (!r.wasNull()) {
o.setTextValue(text_value);
}
String term_value = r.getString("term_value");
if (!r.wasNull()) {
o.setTermValue(term_value);
}
String file_type = r.getString("file_type");
if (!r.wasNull()) {
o.setFileType(file_type);
}
String download_file_path = r.getString("download_file_path");
if (!r.wasNull()) {
o.setDownloadFilePath(download_file_path);
}
if (parameterAssociationMap.containsKey(r.getInt("id"))) {
for (ParameterAssociationBean pb : parameterAssociationMap.get(r.getInt("id"))) {
// Will never be null, we hope
o.addParameterAssociationStableId(pb.parameterStableId);
o.addParameterAssociationName(pb.parameterAssociationName);
if (StringUtils.isNotEmpty(pb.parameterAssociationValue)) {
o.addParameterAssociationValue(pb.parameterAssociationValue);
}
if (StringUtils.isNotEmpty(pb.sequenceId)) {
o.addParameterAssociationSequenceId(pb.sequenceId);
}
if (StringUtils.isNotEmpty(pb.dimId)) {
o.addParameterAssociationDimId(pb.dimId);
}
}
}
// Add weight parameters only if this observation isn't for a weight parameter
if (!Constants.weightParameters.contains(o.getParameterStableId()) && !ipgttWeightParameter.equals(o.getParameterStableId())) {
WeightBean b = getNearestWeight(o.getBiologicalSampleId(), dateOfExperiment);
if (o.getProcedureGroup().contains("_IPG")) {
b = getNearestIpgttWeight(o.getBiologicalSampleId());
}
if (b != null) {
o.setWeight(b.weight);
o.setWeightDate(b.date);
o.setWeightDaysOld(b.daysOld);
o.setWeightParameterStableId(b.parameterStableId);
}
}
// 60 seconds between commits
documentCount++;
observationSolrServer.addBean(o, 60000);
count++;
if (count % 2000000 == 0) {
logger.info(" Added " + count + " beans");
}
}
// Final commit to save the rest of the docs
observationSolrServer.commit();
} catch (Exception e) {
logger.error(" Error populating the observation core: " + e.getMessage(), e);
}
return count;
}
/**
* Add all the relevant data required for quickly looking up biological data associated with a biological sample
*
* @throws SQLException when a database exception occurs
*/
public void populateBiologicalDataMap() throws SQLException {
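// Discover which optional columns exist on biological_sample in this schema version;
// production_center_id and litter_id are only selected when present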
String featureFlagMeansQuery = "SELECT column_name FROM information_schema.COLUMNS WHERE TABLE_NAME='biological_sample' AND TABLE_SCHEMA=(SELECT database())";
Set<String> featureFlags = new HashSet<>();
try (PreparedStatement p = connection.prepareStatement(featureFlagMeansQuery)) {
ResultSet r = p.executeQuery();
while (r.next()) {
featureFlags.add(r.getString("column_name"));
}
}
String query = "SELECT CAST(bs.id AS CHAR) as biological_sample_id, bs.organisation_id as phenotyping_center_id, "
+ "org.name as phenotyping_center_name, bs.sample_group, bs.external_id as external_sample_id, "
+ "ls.date_of_birth, ls.colony_id, ls.sex as sex, ls.zygosity, ls.developmental_stage_acc, ot.name AS developmental_stage_name, ot.acc as developmental_stage_acc,"
+ "bms.biological_model_id, "
+ "strain.acc as strain_acc, strain.name as strain_name, bm.genetic_background, "
+ "(select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=bms.biological_model_id) as allele_accession, "
+ "(select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bms.biological_model_id) as allele_symbol, "
+ "(select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bms.biological_model_id) as acc, "
+ "(select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bms.biological_model_id) as symbol ";
if (featureFlags.contains("production_center_id") && featureFlags.contains("litter_id")) {
query += ", bs.production_center_id, prod_org.name as production_center_name, bs.litter_id ";
}
query += "FROM biological_sample bs "
+ "INNER JOIN organisation org ON bs.organisation_id=org.id "
+ "INNER JOIN live_sample ls ON bs.id=ls.id "
+ "INNER JOIN biological_model_sample bms ON bs.id=bms.biological_sample_id "
+ "INNER JOIN biological_model_strain bmstrain ON bmstrain.biological_model_id=bms.biological_model_id "
+ "INNER JOIN strain strain ON strain.acc=bmstrain.strain_acc "
+ "INNER JOIN biological_model bm ON bm.id = bms.biological_model_id "
+ "INNER JOIN ontology_term ot ON ot.acc=ls.developmental_stage_acc ";
if (featureFlags.contains("production_center_id") && featureFlags.contains("litter_id")) {
query += "INNER JOIN organisation prod_org ON bs.organisation_id=prod_org.id ";
}
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
BiologicalDataBean b = new BiologicalDataBean();
b.alleleAccession = resultSet.getString("allele_accession");
b.alleleSymbol = resultSet.getString("allele_symbol");
b.biologicalModelId = resultSet.getInt("biological_model_id");
b.biologicalSampleId = resultSet.getInt("biological_sample_id");
b.colonyId = resultSet.getString("colony_id");
try {
b.dateOfBirth = ZonedDateTime.parse(resultSet.getString("date_of_birth"), DateTimeFormatter.ofPattern(DATETIME_FORMAT).withZone(ZoneId.of("UTC")));
} catch (NullPointerException e) {
b.dateOfBirth = null;
logger.debug("No date of birth set for specimen external ID: {}", resultSet.getString("external_sample_id"));
}
b.externalSampleId = resultSet.getString("external_sample_id");
b.geneAcc = resultSet.getString("acc");
b.geneSymbol = resultSet.getString("symbol");
b.phenotypingCenterId = resultSet.getInt("phenotyping_center_id");
b.phenotypingCenterName = resultSet.getString("phenotyping_center_name");
b.sampleGroup = resultSet.getString("sample_group");
b.sex = resultSet.getString("sex");
b.strainAcc = resultSet.getString("strain_acc");
b.strainName = resultSet.getString("strain_name");
b.geneticBackground = resultSet.getString("genetic_background");
b.zygosity = resultSet.getString("zygosity");
b.developmentalStageAcc = resultSet.getString("developmental_stage_acc");
b.developmentalStageName = resultSet.getString("developmental_stage_name");
if (featureFlags.contains("production_center_id") && featureFlags.contains("litter_id")) {
b.productionCenterId = resultSet.getInt("production_center_id");
b.productionCenterName = resultSet.getString("production_center_name");
b.litterId = resultSet.getString("litter_id");
}
biologicalData.put(resultSet.getString("biological_sample_id"), b);
}
}
}
/**
* Add all the relevant data required for quickly looking up biological data associated with a biological model (really an
* experiment)
*
* @throws SQLException when a database exception occurs
*/
public void populateLineBiologicalDataMap() throws SQLException {
String query = "SELECT e.id as experiment_id, e.colony_id, e.biological_model_id, "
+ "e.organisation_id as phenotyping_center_id, org.name as phenotyping_center_name, "
+ "strain.acc as strain_acc, strain.name as strain_name, bm.genetic_background, "
+ "(select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=e.biological_model_id) as allele_accession, "
+ "(select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=e.biological_model_id) as allele_symbol, "
+ "(select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=e.biological_model_id) as acc, "
+ "(select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=e.biological_model_id) as symbol "
+ "FROM experiment e "
+ "INNER JOIN organisation org ON e.organisation_id=org.id "
+ "INNER JOIN biological_model_strain bm_strain ON bm_strain.biological_model_id=e.biological_model_id "
+ "INNER JOIN strain strain ON strain.acc=bm_strain.strain_acc "
+ "INNER JOIN biological_model bm ON bm.id = e.biological_model_id";
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
BiologicalDataBean b = new BiologicalDataBean();
b.colonyId = resultSet.getString("colony_id");
b.phenotypingCenterId = resultSet.getInt("phenotyping_center_id");
b.phenotypingCenterName = resultSet.getString("phenotyping_center_name");
b.strainAcc = resultSet.getString("strain_acc");
b.strainName = resultSet.getString("strain_name");
b.geneticBackground = resultSet.getString("genetic_background");
b.alleleAccession = resultSet.getString("allele_accession");
b.alleleSymbol = resultSet.getString("allele_symbol");
b.biologicalModelId = resultSet.getInt("biological_model_id");
b.geneAcc = resultSet.getString("acc");
b.geneSymbol = resultSet.getString("symbol");
if (b.alleleAccession == null && b.colonyId != null) {
// Override the biological model with one that has the
// correct gene/allele/strain
String query2 = "SELECT DISTINCT bm.id as biological_model_id, " +
" (select distinct allele_acc from biological_model_allele bma WHERE bma.biological_model_id=bm.id) as allele_accession, " +
" (select distinct a.symbol from biological_model_allele bma INNER JOIN allele a on (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bm.id) as allele_symbol, " +
" (select distinct gf_acc from biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bm.id) as acc, " +
" (select distinct gf.symbol from biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf on gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bm.id) as symbol, " +
" strain.acc as strain_acc, strain.name as strain_name, bm.genetic_background " +
" FROM live_sample ls " +
" INNER JOIN biological_model_sample bms ON bms.biological_sample_id=ls.id " +
" INNER JOIN biological_model bm ON bm.id=bms.biological_model_id " +
" INNER JOIN biological_model_strain bm_strain ON bm_strain.biological_model_id=bm.id " +
" INNER JOIN strain strain ON strain.acc=bm_strain.strain_acc " +
" WHERE bm.allelic_composition !='' AND ls.colony_id = ? LIMIT 1 " ;
try (PreparedStatement p2 = connection.prepareStatement(query2)) {
p2.setString(1, resultSet.getString("colony_id"));
ResultSet resultSet2 = p2.executeQuery();
resultSet2.next();
b.strainAcc = resultSet2.getString("strain_acc");
b.strainName = resultSet2.getString("strain_name");
b.geneticBackground = resultSet2.getString("genetic_background");
b.alleleAccession = resultSet2.getString("allele_accession");
b.alleleSymbol = resultSet2.getString("allele_symbol");
b.biologicalModelId = resultSet2.getInt("biological_model_id");
b.geneAcc = resultSet2.getString("acc");
b.geneSymbol = resultSet2.getString("symbol");
}
}
lineBiologicalData.put(resultSet.getString("experiment_id"), b);
}
}
}
/**
* Add all the relevant data required for translating the category names in the cases where the category names are
* numerals, but the actual name is in the description field
*
* @throws SQLException when a database exception occurs
*/
public void populateCategoryNamesDataMap() throws SQLException {
String query = "SELECT pp.stable_id, ppo.name, ppo.description FROM phenotype_parameter pp "
+ "INNER JOIN phenotype_parameter_lnk_option pplo ON pp.id=pplo.parameter_id "
+ "INNER JOIN phenotype_parameter_option ppo ON ppo.id=pplo.option_id "
+ "WHERE ppo.name NOT REGEXP '^[a-zA-Z]' AND ppo.description!='' ";
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
String stableId = resultSet.getString("stable_id");
logger.debug(" parameter_stable_id for numeric category: {}", stableId);
if (!translateCategoryNames.containsKey(stableId)) {
translateCategoryNames.put(stableId, new HashMap<>());
}
String name = resultSet.getString("name");
String description = resultSet.getString("description");
if (name.matches("[0-9]+")) {
// add .0 onto the string as this is what our numerical categories look like in Solr
name += ".0";
translateCategoryNames.get(stableId).put(name, description);
} else {
logger.debug(" Not translating non alphabetical category for parameter: " + stableId + ", name: " + name + ", desc:" + description);
}
}
}
}
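/**
* Populate the map of observation ID => parameter associations recorded against that observation
*
* @throws SQLException when a database exception occurs
*/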
public void populateParameterAssociationMap() throws SQLException {
Map<String, String> stableIdToNameMap = this.getAllParameters();
String query = "SELECT id, observation_id, parameter_id, sequence_id, dim_id, parameter_association_value FROM parameter_association";
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
Integer obsId = resultSet.getInt("observation_id");
ParameterAssociationBean pb = new ParameterAssociationBean();
pb.observationId = obsId;
pb.parameterStableId = resultSet.getString("parameter_id");
pb.parameterAssociationValue = resultSet.getString("parameter_association_value");
if (stableIdToNameMap.get(pb.parameterStableId) != null) {
pb.parameterAssociationName = stableIdToNameMap.get(pb.parameterStableId);
}
pb.sequenceId = resultSet.getString("sequence_id");
pb.dimId = resultSet.getString("dim_id");
if (!parameterAssociationMap.containsKey(obsId)) {
parameterAssociationMap.put(obsId, new ArrayList<>());
}
parameterAssociationMap.get(obsId).add(pb);
}
}
}
/**
* Return all parameter stable ids and names
*
* @throws SQLException When a database error occurs
*/
public Map<String, String> getAllParameters() throws SQLException {
Map<String, String> parameters = new HashMap<>();
String query = "SELECT stable_id, name FROM komp2.phenotype_parameter";
try (PreparedStatement statement = getConnection().prepareStatement(query)) {
ResultSet resultSet = statement.executeQuery();
while (resultSet.next()) {
parameters.put(resultSet.getString("stable_id"), resultSet.getString("name"));
}
}
return parameters;
}
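/**
* Populate the datasource and project lookup maps, keyed by database ID
*
* @throws SQLException when a database exception occurs
*/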
public void populateDatasourceDataMap() throws SQLException {
List<String> queries = new ArrayList<>();
queries.add("SELECT id, short_name as name, 'DATASOURCE' as datasource_type FROM external_db");
queries.add("SELECT id, name, 'PROJECT' as datasource_type FROM project");
for (String query : queries) {
try (PreparedStatement p = connection.prepareStatement(query)) {
ResultSet resultSet = p.executeQuery();
while (resultSet.next()) {
DatasourceBean b = new DatasourceBean();
b.id = resultSet.getInt("id");
b.name = resultSet.getString("name");
switch (resultSet.getString("datasource_type")) {
case "DATASOURCE":
datasourceMap.put(resultSet.getInt("id"), b);
break;
case "PROJECT":
projectMap.put(resultSet.getInt("id"), b);
break;
}
}
}
}
}
/**
* Compare all weight dates to select the nearest to the date of experiment
*
* @param specimenID the specimen
* @param dateOfExperiment the date
* @return the nearest weight bean to the date of the experiment
*/
public WeightBean getNearestWeight(Integer specimenID, ZonedDateTime dateOfExperiment) {
WeightBean nearest = null;
if (dateOfExperiment != null && weightMap.containsKey(specimenID)) {
for (WeightBean candidate : weightMap.get(specimenID)) {
if (nearest == null) {
nearest = candidate;
continue;
}
if (Math.abs(dateOfExperiment.toInstant().toEpochMilli() - candidate.date.toInstant().toEpochMilli()) < Math.abs(dateOfExperiment.toInstant().toEpochMilli() - nearest.date.toInstant().toEpochMilli())) {
nearest = candidate;
}
}
}
// Do not return weight that is > 4 days away from the experiment
// since the weight of the specimen becomes less and less relevant
// (Heuristic from Natasha Karp @ WTSI)
// 4 days = 345,600,000 ms
if (nearest != null && Math.abs(dateOfExperiment.toInstant().toEpochMilli() - nearest.date.toInstant().toEpochMilli()) > 3.456E8) {
nearest = null;
}
return nearest;
}
/**
* Select the weight recorded as part of the IPGTT procedure for a specimen
*
* @param specimenID the specimen
* @return the IPGTT weight bean for the specimen, or null if none was recorded
*/
public WeightBean getNearestIpgttWeight(Integer specimenID) {
WeightBean nearest = null;
if (ipgttWeightMap.containsKey(specimenID)) {
nearest = ipgttWeightMap.get(specimenID);
}
return nearest;
}
/**
* Return map of specimen ID => List of all weights ordered by date ASC
*
* @exception SQLException When a database error occurs
*/
public void populateWeightMap() throws SQLException {
int count=0;
String query = "SELECT\n" +
" o.biological_sample_id, \n" +
" data_point AS weight, \n" +
" parameter_stable_id, \n" +
" date_of_experiment, \n" +
" datediff(date_of_experiment, ls.date_of_birth) as days_old, \n" +
" e.organisation_id \n" +
"FROM observation o \n" +
" INNER JOIN unidimensional_observation uo ON uo.id = o.id \n" +
" INNER JOIN live_sample ls ON ls.id=o.biological_sample_id \n" +
" INNER JOIN experiment_observation eo ON o.id = eo.observation_id \n" +
" INNER JOIN experiment e ON e.id = eo.experiment_id \n" +
"WHERE parameter_stable_id IN ("+StringUtils.join(Constants.weightParameters, ",")+") AND data_point > 0" +
" ORDER BY biological_sample_id, date_of_experiment ASC \n" ;
try (PreparedStatement statement = getConnection().prepareStatement(query)) {
ResultSet resultSet = statement.executeQuery();
while (resultSet.next()) {
WeightBean b = new WeightBean();
try {
b.date = ZonedDateTime.parse(resultSet.getString("date_of_experiment"), DateTimeFormatter.ofPattern(DATETIME_FORMAT).withZone(ZoneId.of("UTC")));
} catch (NullPointerException e) {
b.date = null;
logger.debug("No date of experiment set for sample id {} parameter {}", resultSet.getString("biological_sample_id"), resultSet.getString("parameter_stable_id"));
}
b.weight = resultSet.getFloat("weight");
b.parameterStableId = resultSet.getString("parameter_stable_id");
b.daysOld = resultSet.getInt("days_old");
final Integer specimenId = resultSet.getInt("biological_sample_id");
if (!weightMap.containsKey(specimenId)) {
weightMap.put(specimenId, new ArrayList<>());
}
weightMap.get(specimenId).add(b);
count += 1;
}
}
logger.info(" Added {} specimen weight data map entries", count, weightMap.size());
}
/**
* Populate the map of specimen ID => weight recorded for the IPGTT procedure
*
* @exception SQLException When a database error occurs
*/
public void populateIpgttWeightMap() throws SQLException {
String query = "SELECT o.biological_sample_id, data_point AS weight, parameter_stable_id, " +
"date_of_experiment, DATEDIFF(date_of_experiment, ls.date_of_birth) AS days_old " +
"FROM observation o " +
" INNER JOIN unidimensional_observation uo ON uo.id = o.id " +
" INNER JOIN live_sample ls ON ls.id=o.biological_sample_id " +
" INNER JOIN experiment_observation eo ON o.id = eo.observation_id " +
" INNER JOIN experiment e ON e.id = eo.experiment_id " +
"WHERE parameter_stable_id = '"+ ipgttWeightParameter +"' " ;
try (PreparedStatement statement = getConnection().prepareStatement(query)) {
ResultSet resultSet = statement.executeQuery();
while (resultSet.next()) {
WeightBean b = new WeightBean();
try {
b.date = ZonedDateTime.parse(resultSet.getString("date_of_experiment"), DateTimeFormatter.ofPattern(DATETIME_FORMAT).withZone(ZoneId.of("UTC")));
} catch (NullPointerException e) {
b.date = null;
logger.debug("No date of experiment set for sample id {} parameter {}", resultSet.getString("biological_sample_id"), resultSet.getString("parameter_stable_id"));
}
b.weight = resultSet.getFloat("weight");
b.parameterStableId = resultSet.getString("parameter_stable_id");
b.daysOld = resultSet.getInt("days_old");
final Integer specimenId = resultSet.getInt("biological_sample_id");
ipgttWeightMap.put(specimenId, b);
}
}
}
public static Connection getConnection() {
return connection;
}
public Map<String, Map<String, String>> getTranslateCategoryNames() {
return translateCategoryNames;
}
public Map<String, BiologicalDataBean> getLineBiologicalData() {
return lineBiologicalData;
}
public Map<String, BiologicalDataBean> getBiologicalData() {
return biologicalData;
}
public Map<Integer, DatasourceBean> getDatasourceMap() {
return datasourceMap;
}
public Map<Integer, DatasourceBean> getProjectMap() {
return projectMap;
}
public Map<Integer, List<WeightBean>> getWeightMap() {
return weightMap;
}
/**
* Internal class to act as Map value DTO for biological data
*/
protected class BiologicalDataBean {
public String alleleAccession;
public String alleleSymbol;
public Integer biologicalModelId;
public Integer biologicalSampleId;
public String colonyId;
public ZonedDateTime dateOfBirth;
public String externalSampleId;
public String geneAcc;
public String geneSymbol;
public String phenotypingCenterName;
public Integer phenotypingCenterId;
public String sampleGroup;
public String sex;
public String strainAcc;
public String strainName;
public String geneticBackground;
public String zygosity;
public String developmentalStageAcc;
public String developmentalStageName;
public String productionCenterName;
public Integer productionCenterId;
public String litterId;
}
/**
* Internal class to act as Map value DTO for weight data
*/
protected class WeightBean {
public String parameterStableId;
public ZonedDateTime date;
public Float weight;
public Integer daysOld;
@Override
public String toString() {
return "WeightBean{" +
"parameterStableId='" + parameterStableId + '\'' +
", date=" + date +
", weight=" + weight +
", daysOld=" + daysOld +
'}';
}
}
/**
* Internal class to act as Map value DTO for datasource data
*/
protected class DatasourceBean {
public Integer id;
public String name;
}
/**
* Internal class to act as Map value DTO for datasource data
*/
protected class ParameterAssociationBean {
public String parameterAssociationName;
public String parameterAssociationValue;
public Integer id;
public Integer observationId;
public String parameterStableId;
public String sequenceId;
public String dimId;
}
}
| Backed out Jonathan's change to populateObservationSolrCore() to see if the indexer speeds up.
| indexers/src/main/java/org/mousephenotype/cda/indexers/ObservationIndexer.java | Backed out Jonathan's change to populateObservationSolrCore() to see if the indexer speeds up. |
|
Java | apache-2.0 | b3400b3dbb5e31fc14976b89078601b0bd9050c1 | 0 | attatrol/Orekit,ProjectPersephone/Orekit,CS-SI/Orekit,treeform/orekit,wardev/orekit,ProjectPersephone/Orekit,Yakushima/Orekit,Yakushima/Orekit,petrushy/Orekit,CS-SI/Orekit,liscju/Orekit,liscju/Orekit,attatrol/Orekit,petrushy/Orekit | /* Copyright 2002-2008 CS Communication & Systèmes
* Licensed to CS Communication & Systèmes (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.propagation.sampling;
import java.io.Serializable;
import org.orekit.errors.OrekitException;
import org.orekit.errors.PropagationException;
import org.orekit.propagation.SpacecraftState;
import org.orekit.time.AbsoluteDate;
/** This interface is a space-dynamics aware step interpolator.
*
* <p>It mirrors the <code>StepInterpolator</code> interface from <a
* href="http://commons.apache.org/math/"> commons-math</a> but
* provides a space-dynamics interface to the methods.</p>
* @author Luc Maisonobe
* @version $Revision$ $Date$
*/
public interface OrekitStepInterpolator extends Serializable {
/** Get the current grid date.
* @return current grid date
*/
AbsoluteDate getCurrentDate();
/** Get the previous grid date.
* @return previous grid date
*/
AbsoluteDate getPreviousDate();
/** Get the interpolated date.
* <p>If {@link #setInterpolatedDate(AbsoluteDate) setInterpolatedDate}
* has not been called, the date returned is the same as {@link
* #getCurrentDate() getCurrentDate}.</p>
* @return interpolated date
* @see #setInterpolatedDate(AbsoluteDate)
* @see #getInterpolatedState()
*/
AbsoluteDate getInterpolatedDate();
/** Set the interpolated date.
* <p>It is possible to set the interpolation date outside of the current
* step range, but accuracy will decrease as date is farther.</p>
* @param date interpolated date to set
* @exception PropagationException if underlying interpolator cannot handle
* the date
* @see #getInterpolatedDate()
* @see #getInterpolatedState()
*/
void setInterpolatedDate(final AbsoluteDate date)
throws PropagationException;
/** Get the interpolated state.
* @return interpolated state at the current interpolation date
* @exception OrekitException if state cannot be interpolated or converted
* @see #getInterpolatedDate()
* @see #setInterpolatedDate(AbsoluteDate)
*/
SpacecraftState getInterpolatedState() throws OrekitException;
/** Check if the integration direction is forward in date.
* @return true if integration is forward in date
*/
boolean isForward();
}
| src/main/java/org/orekit/propagation/sampling/OrekitStepInterpolator.java | /* Copyright 2002-2008 CS Communication & Systèmes
* Licensed to CS Communication & Systèmes (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.propagation.sampling;
import java.io.Serializable;
import org.orekit.errors.OrekitException;
import org.orekit.errors.PropagationException;
import org.orekit.propagation.SpacecraftState;
import org.orekit.time.AbsoluteDate;
/** This interface is a space-dynamics aware step interpolator.
*
* <p>It mirrors the {@link org.apache.commons.math.ode.StepInterpolator
* StepInterpolator} interface from <a href="http://commons.apache.org/math/">
* commons-math</a> but provides a space-dynamics interface to the methods.</p>
* @author Luc Maisonobe
* @version $Revision$ $Date$
*/
public interface OrekitStepInterpolator extends Serializable {
/** Get the current grid date.
* @return current grid date
*/
AbsoluteDate getCurrentDate();
/** Get the previous grid date.
* @return previous grid date
*/
AbsoluteDate getPreviousDate();
/** Get the interpolated date.
* <p>If {@link #setInterpolatedDate(AbsoluteDate) setInterpolatedDate}
* has not been called, the date returned is the same as {@link
* #getCurrentDate() getCurrentDate}.</p>
* @return interpolated date
* @see #setInterpolatedDate(AbsoluteDate)
* @see #getInterpolatedState()
*/
AbsoluteDate getInterpolatedDate();
/** Set the interpolated date.
* <p>It is possible to set the interpolation date outside of the current
* step range, but accuracy will decrease as date is farther.</p>
* @param date interpolated date to set
* @exception PropagationException if underlying interpolator cannot handle
* the date
* @see #getInterpolatedDate()
* @see #getInterpolatedState()
*/
void setInterpolatedDate(final AbsoluteDate date)
throws PropagationException;
/** Get the interpolated state.
* @return interpolated state at the current interpolation date
* @exception OrekitException if state cannot be interpolated or converted
* @see #getInterpolatedDate()
* @see #setInterpolatedDate(AbsoluteDate)
*/
SpacecraftState getInterpolatedState() throws OrekitException;
/** Check if the integration direction is forward in date.
* @return true if integration is forward in date
*/
boolean isForward();
}
| fixed javadoc warnings
| src/main/java/org/orekit/propagation/sampling/OrekitStepInterpolator.java | fixed javadoc warnings |
|
Java | apache-2.0 | 02172733fdc663e675bdaf88378a849017ec2a28 | 0 | redpen-cc/redpen,gerryhocks/redpen,johtani/redpen,recruit-tech/redpen,gerryhocks/redpen,jmoriya/redpen,gerryhocks/redpen,gerryhocks/redpen,redpen-cc/redpen,kenhys/redpen,tatamiya3/redpen,kenhys/redpen,redpen-cc/redpen,tatamiya3/redpen,redpen-cc/redpen,kenhys/redpen,kenhys/redpen,johtani/redpen,redpen-cc/redpen,jmoriya/redpen,johtani/redpen,recruit-tech/redpen,recruit-tech/redpen,tatamiya3/redpen,recruit-tech/redpen,jmoriya/redpen,tatamiya3/redpen | /**
* DocumentValidator
* Copyright (c) 2013-, Takahiko Ito, All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3.0 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library.
*/
package org.unigram.docvalidator.parser;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.unigram.docvalidator.util.CharacterTable;
import org.unigram.docvalidator.util.Configuration;
import org.unigram.docvalidator.util.DVResource;
import org.unigram.docvalidator.util.DocumentValidatorException;
import org.unigram.docvalidator.DefaultSymbols;
/**
* Abstract Parser class containing common procedures used to
* implement the concrete Parser classes.
*/
public abstract class AbstractDocumentParser implements Parser {
/**
* Load basic configuration settings.
* @param resource object containing configuration settings
* @return true when the settings are loaded successfully, false otherwise
*/
public final boolean initialize(DVResource resource) {
if (resource == null) {
return false;
}
Configuration conf = resource.getConfiguration();
CharacterTable characterTable = resource.getCharacterTable();
this.comment = DefaultSymbols.get("COMMENT").getValue();
if (conf.hasAttribute("comment")) {
this.comment = conf.getAttribute("comment");
}
this.comma = DefaultSymbols.get("COMMA").getValue();
if (characterTable.isContainCharacter("COMMA")) {
this.comma = characterTable.getCharacter("COMMA").getValue();
LOG.info("comma is set to \"" + this.comma + "\"");
}
this.period = DefaultSymbols.get("FULL_STOP").getValue();
if (characterTable.isContainCharacter("FULL_STOP")) {
this.period = characterTable.getCharacter("FULL_STOP").getValue();
LOG.info("full stop is set to \"" + this.period + "\"");
}
return true;
}
protected final InputStream loadStream(String fileName)
throws DocumentValidatorException {
InputStream inputStream = null;
if (fileName == null || fileName.equals("")) {
LOG.error("input file was not specified.");
return null;
} else {
try {
inputStream = new FileInputStream(fileName);
} catch (FileNotFoundException e) {
LOG.error("Input file is not fould: " + e.getMessage());
}
}
return inputStream;
}
protected String comma;
protected String comment;
protected String period;
private static Logger LOG = LoggerFactory.getLogger(Parser.class);
}
| src/main/java/org/unigram/docvalidator/parser/AbstractDocumentParser.java | package org.unigram.docvalidator.parser;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.unigram.docvalidator.util.CharacterTable;
import org.unigram.docvalidator.util.Configuration;
import org.unigram.docvalidator.util.DVResource;
import org.unigram.docvalidator.util.DocumentValidatorException;
import org.unigram.docvalidator.DefaultSymbols;
/**
* Abstract Parser class containing common procedures used to
* implement the concrete Parser classes.
*/
public abstract class AbstractDocumentParser implements Parser {
/**
* Load basic configuration settings.
* @param resource object containing configuration settings
* @return true when the settings are loaded successfully, false otherwise
*/
public final boolean initialize(DVResource resource) {
if (resource == null) {
return false;
}
Configuration conf = resource.getConfiguration();
CharacterTable characterTable = resource.getCharacterTable();
this.comment = DefaultSymbols.get("COMMENT").getValue();
if (conf.hasAttribute("comment")) {
this.comment = conf.getAttribute("comment");
}
this.comma = DefaultSymbols.get("COMMA").getValue();
if (characterTable.isContainCharacter("COMMA")) {
this.comma = characterTable.getCharacter("COMMA").getValue();
LOG.info("comma is set to \"" + this.comma + "\"");
}
this.period = DefaultSymbols.get("FULL_STOP").getValue();
if (characterTable.isContainCharacter("FULL_STOP")) {
this.period = characterTable.getCharacter("FULL_STOP").getValue();
LOG.info("full stop is set to \"" + this.period + "\"");
}
return true;
}
protected final InputStream loadStream(String fileName)
throws DocumentValidatorException {
InputStream inputStream = null;
if (fileName == null || fileName.equals("")) {
LOG.error("input file was not specified.");
return null;
} else {
try {
inputStream = new FileInputStream(fileName);
} catch (FileNotFoundException e) {
LOG.error("Input file is not fould: " + e.getMessage());
}
}
return inputStream;
}
protected String comma;
protected String comment;
protected String period;
private static Logger LOG = LoggerFactory.getLogger(Parser.class);
}
| Add license header to a file
| src/main/java/org/unigram/docvalidator/parser/AbstractDocumentParser.java | Add license header to a file |
|
Java | bsd-2-clause | 6706b4496d3ea19f324e97f2190270c4d7aebf8e | 0 | oblac/jodd,oblac/jodd,oblac/jodd,mosoft521/jodd,mosoft521/jodd,oblac/jodd,mosoft521/jodd,mosoft521/jodd | // Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.madvoc;
import jodd.exception.UncheckedException;
import jodd.util.SystemUtil;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Nested;
class MadvocSuiteTest {
/**
 * Starts Tomcat before the suite.
*/
@BeforeAll
static void beforeClass() {
isSuite = true;
startTomcat();
Assumptions.assumeTrue(SystemUtil.javaVersionNumber() == 8,
"The complete suite can not be tested on Java9 as Multi-Release JARS do not work in exploded mode.");
}
/**
* Stop Tomcat after the suite.
*/
@AfterAll
static void afterSuite() {
isSuite = false;
stopTomcat();
}
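	/**
	 * Set while the suite is running so that {@link #stopTomcat()} keeps the shared Tomcat instance alive.
	 */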
public static boolean isSuite;
// ---------------------------------------------------------------- tomcat
protected static TomcatTestServer server;
/**
* Starts Tomcat.
*/
protected static void startTomcat(String webXmlFileName) {
if (server != null) {
return;
}
server = new TomcatTestServer(webXmlFileName);
try {
server.start();
System.out.println("Tomcat test server started");
} catch (Exception e) {
throw new UncheckedException(e);
}
}
/**
* Stops Tomcat if not in the suite.
*/
public static void stopTomcat() {
if (server == null) {
return;
}
		if (isSuite) { // don't stop tomcat if we are still running in the suite!
return;
}
try {
server.stop();
} catch (Exception ignore) {
} finally {
System.out.println("Tomcat test server stopped");
server = null;
}
}
public static void startTomcat() {
startTomcat("web-test-int.xml");
}
// ---------------------------------------------------------------- go
@Nested
class HelloActionTest extends HelloActionTestBase {}
@Nested
class SimpleTest extends SimpleTestBase {}
@Nested
class RawActionTest extends RawActionTestBase {}
@Nested
class UrlActionTest extends UrlActionTestBase {}
@Nested
class OneTwoActionTest extends OneTwoActionTestBase {}
@Nested
class IntcptActionTest extends IntcptActionTestBase {}
@Nested
class RestActionTest extends RestActionTestBase {}
@Nested
class FilterTest extends FilterTestBase {}
@Nested
class SessionScopeTest extends SessionScopeTestBase {}
@Nested
class AlphaTest extends AlphaTestBase {}
@Nested
class ArgsTest extends ArgsTestBase {}
@Nested
class TypesTest extends TypesTestBase {}
@Nested
class ExcTest extends ExcTestBase {}
@Nested
class UserActionTest extends UserActionTestBase {}
@Nested
class AsyncTest extends AsyncTestBase {}
@Nested
class MoveTest extends MoveTestBase {}
@Nested
class BookActionTest extends BookActionTestBase {}
@Nested
class ResultsTest extends ResultsTestBase {}
@Nested
class TagActionTest extends TagActionTestBase {}
@Nested
class MissingActionTest extends MissingActionTestBase {}
@Nested
class ComponentTest extends ComponentTestBase {}
}
| jodd-madvoc/src/testInt/java/jodd/madvoc/MadvocSuiteTest.java | // Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.madvoc;
import jodd.exception.UncheckedException;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Nested;
class MadvocSuiteTest {
/**
 * Starts Tomcat before the suite.
*/
@BeforeAll
static void beforeClass() {
isSuite = true;
startTomcat();
}
/**
* Stop Tomcat after the suite.
*/
@AfterAll
static void afterSuite() {
isSuite = false;
stopTomcat();
}
public static boolean isSuite;
// ---------------------------------------------------------------- tomcat
protected static TomcatTestServer server;
/**
* Starts Tomcat.
*/
protected static void startTomcat(String webXmlFileName) {
if (server != null) {
return;
}
server = new TomcatTestServer(webXmlFileName);
try {
server.start();
System.out.println("Tomcat test server started");
} catch (Exception e) {
throw new UncheckedException(e);
}
}
/**
* Stops Tomcat if not in the suite.
*/
public static void stopTomcat() {
if (server == null) {
return;
}
		if (isSuite) { // don't stop tomcat if we are still running in the suite!
return;
}
try {
server.stop();
} catch (Exception ignore) {
} finally {
System.out.println("Tomcat test server stopped");
server = null;
}
}
public static void startTomcat() {
startTomcat("web-test-int.xml");
}
// ---------------------------------------------------------------- go
@Nested
class HelloActionTest extends HelloActionTestBase {}
@Nested
class SimpleTest extends SimpleTestBase {}
@Nested
class RawActionTest extends RawActionTestBase {}
@Nested
class UrlActionTest extends UrlActionTestBase {}
@Nested
class OneTwoActionTest extends OneTwoActionTestBase {}
@Nested
class IntcptActionTest extends IntcptActionTestBase {}
@Nested
class RestActionTest extends RestActionTestBase {}
@Nested
class FilterTest extends FilterTestBase {}
@Nested
class SessionScopeTest extends SessionScopeTestBase {}
@Nested
class AlphaTest extends AlphaTestBase {}
@Nested
class ArgsTest extends ArgsTestBase {}
@Nested
class TypesTest extends TypesTestBase {}
@Nested
class ExcTest extends ExcTestBase {}
@Nested
class UserActionTest extends UserActionTestBase {}
@Nested
class AsyncTest extends AsyncTestBase {}
@Nested
class MoveTest extends MoveTestBase {}
@Nested
class BookActionTest extends BookActionTestBase {}
@Nested
class ResultsTest extends ResultsTestBase {}
@Nested
class TagActionTest extends TagActionTestBase {}
@Nested
class MissingActionTest extends MissingActionTestBase {}
@Nested
class ComponentTest extends ComponentTestBase {}
}
| Java9 Multi-Release JARs are PITA.
| jodd-madvoc/src/testInt/java/jodd/madvoc/MadvocSuiteTest.java | Java9 Multi-Release JARs are PITA. |
|
Java | mit | 37ef5a882c6eb5ef9fd3c49983ffe9052a9b0838 | 0 | JavaPeppers/jrpg-2017b-cliente | package frames;
import java.awt.Color;
import java.awt.Font;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.IOException;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JLayeredPane;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.WindowConstants;
import javax.swing.border.EmptyBorder;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import cliente.Cliente;
import mensajeria.Comando;
import mensajeria.PaquetePersonaje;
/**
* The Class MenuCreacionPj.
*/
public class MenuCreacionPj extends JFrame {
/**
* The Constant COLUMNASNOMBRE.
*/
private static final int COLUMNASNOMBRE = 10;
/**
* The Constant TAMANIOQUINCE.
*/
private static final int TAMANIOQUINCE = 15;
/**
* The Constant TRES.
*/
private static final int TRES = 3;
/**
* The Constant TAMANIOTRECE.
*/
private static final int TAMANIOTRECE = 13;
/**
* The Constant WOMECREARPERSONAJE.
*/
private static final int[] WOMECREARPERSONAJE = {100, 100, 450, 300 };
/**
* The Constant WOMECREARPERSONAJE2.
*/
private static final int[] WOMECREARPERSONAJE2 = {5, 5, 5, 5 };
/**
* The Constant LAYAREDPANEL.
*/
private static final int[] LAYAREDPANEL = {0, 0, 444, 271 };
/**
* The Constant LABELFUERZA.
*/
private static final int[] LABELFUERZA = {33, 100, 46, 14 };
/**
* The Constant FUERZA.
*/
private static final int[] FUERZA = {110, 102, 22, 14 };
/**
* The Constant LABELDESTREZA.
*/
private static final int[] LABELDESTREZA = {33, 126, 60, 14 };
/**
* The Constant DESTREZA.
*/
private static final int[] DESTREZA = {110, 127, 22, 14 };
/**
* The Constant LABELINTELIGENCIA.
*/
private static final int[] LABELINTELIGENCIA = {33, 151, 66, 22 };
/**
* The Constant INTELIGENCIA.
*/
private static final int[] INTELIGENCIA = {110, 156, 22, 14 };
/**
* The Constant LABELSALUD.
*/
private static final int[] LABELSALUD = {33, 183, 46, 14 };
/**
* The Constant SALUD.
*/
private static final int[] SALUD = {110, 183, 22, 14 };
/**
* The Constant LABELENERGIA.
*/
private static final int[] LABELENERGIA = {33, 204, 46, 20 };
/**
* The Constant ENERGIA.
*/
private static final int[] ENERGIA = {110, 208, 22, 14 };
/**
* The Constant LABELNOMBRE.
*/
private static final int[] LABELNOMBRE = {207, 125, 60, 14 };
/**
* The Constant NOMBRE.
*/
private static final int[] NOMBRE = {277, 122, 122, 20 };
/**
* The Constant LABELACEPTAR.
*/
private static final int[] LABELACEPTAR = {280, 173, 50, 24 };
/**
* The Constant BOTONACEPTAR.
*/
private static final int[] BOTONACEPTAR = {230, 174, 153, 23 };
/**
* The Constant LABELRAZA.
*/
private static final int[] LABELRAZA = {33, 23, 46, 14 };
/**
* The Constant LABELCASTA.
*/
private static final int[] LABELCASTA = {161, 23, 46, 14 };
/**
* The Constant CBXCASTA.
*/
private static final int[] CBXCASTA = {161, 48, 76, 20 };
/**
* The Constant CBXRAZA.
*/
private static final int[] CBXRAZA = {32, 48, 76, 20 };
/**
* The Constant LABELBACKGROUND.
*/
private static final int[] LABELBACKGROUND = {0, 0, 444, 271 };
/**
* The content pane.
*/
private JPanel contentPane;
/**
* The nombre.
*/
private JTextField nombre;
/**
* The destreza.
*/
private JLabel destreza;
/**
* The fuerza.
*/
private JLabel fuerza;
/**
* The inteligencia.
*/
private JLabel inteligencia;
/**
* The salud.
*/
private JLabel salud;
/**
* The energia.
*/
private JLabel energia;
/**
* The cbx casta.
*/
private JComboBox<String> cbxCasta;
/**
* The cbx raza.
*/
private JComboBox<String> cbxRaza;
/**
* Instantiates a new menu creacion pj.
*
* @param cliente
* the cliente
* @param personaje
* the personaje
* @param gson
* the gson
*/
public MenuCreacionPj(final Cliente cliente,
final PaquetePersonaje personaje, final Gson gson) {
setIconImage(Toolkit.getDefaultToolkit()
.getImage("src/main/java/frames/IconoWome.png"));
setCursor(Toolkit.getDefaultToolkit().createCustomCursor(
new ImageIcon(MenuJugar.class.getResource("/cursor.png"))
.getImage(),
new Point(0, 0), "custom cursor"));
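        // Stat presets indexed by combo box position (0 = Humano/Guerrero, 1 = Elfo/Hechicero, 2 = Orco/Asesino).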
final String[] vecSalud = {"55", "50", "60" };
final String[] vecEnergia = {"55", "60", "50" };
final String[] vecFuerza = {"15", "10", "10" };
final String[] vecDestreza = {"10", "10", "15" };
final String[] vecInteligencia = {"10", "15", "10" };
        // When the window is closed
addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(final WindowEvent e) {
personaje.setNombre(nombre.getText());
if (nombre.getText().equals("")) {
personaje.setNombre("nameless");
}
personaje.setRaza((String) cbxRaza.getSelectedItem());
personaje.setSaludTope(
Integer.parseInt(vecSalud[cbxRaza.getSelectedIndex()]));
personaje.setEnergiaTope(Integer
.parseInt(vecEnergia[cbxRaza.getSelectedIndex()]));
personaje.setCasta((String) cbxCasta.getSelectedItem());
personaje.setFuerza(Integer
.parseInt(vecFuerza[cbxCasta.getSelectedIndex()]));
personaje.setDestreza(Integer
.parseInt(vecDestreza[cbxCasta.getSelectedIndex()]));
personaje.setInteligencia(Integer.parseInt(
vecInteligencia[cbxCasta.getSelectedIndex()]));
synchronized (cliente) {
cliente.notify();
}
dispose();
}
});
setTitle("WOME - Crear personaje");
setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
setResizable(false);
setBounds(WOMECREARPERSONAJE[0], WOMECREARPERSONAJE[1],
WOMECREARPERSONAJE[2], WOMECREARPERSONAJE[TRES]);
contentPane = new JPanel();
contentPane.setBorder(
new EmptyBorder(WOMECREARPERSONAJE2[0], WOMECREARPERSONAJE2[1],
WOMECREARPERSONAJE2[2], WOMECREARPERSONAJE2[TRES]));
setContentPane(contentPane);
contentPane.setLayout(null);
setLocationRelativeTo(null);
JLayeredPane layeredPane = new JLayeredPane();
layeredPane.setBounds(LAYAREDPANEL[0], LAYAREDPANEL[1], LAYAREDPANEL[2],
LAYAREDPANEL[TRES]);
contentPane.add(layeredPane);
JLabel lblNewLabel5 = new JLabel("Fuerza");
        lblNewLabel5.setBounds(LABELFUERZA[0], LABELFUERZA[1], LABELFUERZA[2],
LABELFUERZA[TRES]);
layeredPane.add(lblNewLabel5, new Integer(1));
lblNewLabel5.setForeground(Color.WHITE);
lblNewLabel5.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOTRECE));
fuerza = new JLabel("15");
        fuerza.setBounds(FUERZA[0], FUERZA[1], FUERZA[2], FUERZA[TRES]);
layeredPane.add(fuerza, new Integer(1));
fuerza.setForeground(Color.GREEN);
JLabel lblDestreza = new JLabel("Destreza");
        lblDestreza.setBounds(LABELDESTREZA[0], LABELDESTREZA[1],
LABELDESTREZA[2], LABELDESTREZA[TRES]);
layeredPane.add(lblDestreza, new Integer(1));
lblDestreza.setForeground(Color.WHITE);
lblDestreza.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOTRECE));
destreza = new JLabel("10");
        destreza.setBounds(DESTREZA[0], DESTREZA[1], DESTREZA[2],
DESTREZA[TRES]);
layeredPane.add(destreza, new Integer(1));
destreza.setForeground(Color.GREEN);
JLabel lblInteligencia = new JLabel("Inteligencia");
        lblInteligencia.setBounds(LABELINTELIGENCIA[0], LABELINTELIGENCIA[1],
LABELINTELIGENCIA[2], LABELINTELIGENCIA[TRES]);
layeredPane.add(lblInteligencia, new Integer(1));
lblInteligencia.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOTRECE));
lblInteligencia.setForeground(Color.WHITE);
inteligencia = new JLabel("10");
        inteligencia.setBounds(INTELIGENCIA[0], INTELIGENCIA[1],
INTELIGENCIA[2], INTELIGENCIA[TRES]);
layeredPane.add(inteligencia, new Integer(1));
inteligencia.setForeground(Color.GREEN);
JLabel lblSalud = new JLabel("Salud");
        lblSalud.setBounds(LABELSALUD[0], LABELSALUD[1], LABELSALUD[2],
LABELSALUD[TRES]);
layeredPane.add(lblSalud, new Integer(1));
lblSalud.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOTRECE));
lblSalud.setForeground(Color.WHITE);
salud = new JLabel("55");
        salud.setBounds(SALUD[0], SALUD[1], SALUD[2], SALUD[TRES]);
layeredPane.add(salud, new Integer(1));
salud.setForeground(Color.GREEN);
JLabel lblEnergia = new JLabel("Energia");
        lblEnergia.setBounds(LABELENERGIA[0], LABELENERGIA[1], LABELENERGIA[2],
LABELENERGIA[TRES]);
layeredPane.add(lblEnergia, new Integer(1));
lblEnergia.setForeground(Color.WHITE);
lblEnergia.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOTRECE));
energia = new JLabel("55");
        energia.setBounds(ENERGIA[0], ENERGIA[1], ENERGIA[2], ENERGIA[TRES]);
layeredPane.add(energia, new Integer(1));
energia.setForeground(Color.GREEN);
JLabel lblNewLabel4 = new JLabel("Nombre");
lblNewLabel4.setBounds(LABELNOMBRE[0], LABELNOMBRE[1], LABELNOMBRE[2],
LABELNOMBRE[TRES]);
layeredPane.add(lblNewLabel4, new Integer(1));
lblNewLabel4.setForeground(Color.WHITE);
lblNewLabel4.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOQUINCE));
nombre = new JTextField();
nombre.addActionListener(new ActionListener() {
public void actionPerformed(final ActionEvent arg0) {
crearPj(cliente, personaje, gson, vecSalud, vecEnergia,
vecFuerza, vecDestreza, vecInteligencia);
}
});
nombre.setBounds(NOMBRE[0], NOMBRE[1], NOMBRE[2], NOMBRE[TRES]);
layeredPane.add(nombre, new Integer(1));
nombre.setColumns(COLUMNASNOMBRE);
JLabel lblAceptar = new JLabel("Aceptar");
lblAceptar.setBounds(LABELACEPTAR[0], LABELACEPTAR[1], LABELACEPTAR[2],
LABELACEPTAR[TRES]);
layeredPane.add(lblAceptar, new Integer(2));
lblAceptar.setForeground(Color.WHITE);
lblAceptar.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOQUINCE));
        // When the accept button is pressed
JButton btnAceptar = new JButton("Aceptar");
btnAceptar.setBounds(BOTONACEPTAR[0], BOTONACEPTAR[1], BOTONACEPTAR[2],
BOTONACEPTAR[TRES]);
layeredPane.add(btnAceptar, new Integer(1));
btnAceptar.setFocusable(false);
btnAceptar.setIcon(new ImageIcon(
MenuCreacionPj.class.getResource("/frames/BotonMenu.png")));
btnAceptar.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
crearPj(cliente, personaje, gson, vecSalud, vecEnergia,
vecFuerza, vecDestreza, vecInteligencia);
}
});
JLabel lblNewLabel = new JLabel("Raza");
lblNewLabel.setBounds(LABELRAZA[0], LABELRAZA[1], LABELRAZA[2],
LABELRAZA[TRES]);
layeredPane.add(lblNewLabel, new Integer(1));
lblNewLabel.setForeground(Color.WHITE);
lblNewLabel.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOQUINCE));
JLabel lblCasta = new JLabel("Casta");
lblCasta.setBounds(LABELCASTA[0], LABELCASTA[1], LABELCASTA[2],
LABELCASTA[TRES]);
layeredPane.add(lblCasta, new Integer(1));
lblCasta.setForeground(Color.WHITE);
lblCasta.setFont(new Font("Tahoma", Font.PLAIN, TAMANIOQUINCE));
cbxCasta = new JComboBox<>();
cbxCasta.setBounds(CBXCASTA[0], CBXCASTA[1], CBXCASTA[2],
CBXCASTA[TRES]);
layeredPane.add(cbxCasta, new Integer(1));
cbxCasta.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
fuerza.setText(vecFuerza[cbxCasta.getSelectedIndex()]);
destreza.setText(vecDestreza[cbxCasta.getSelectedIndex()]);
inteligencia
.setText(vecInteligencia[cbxCasta.getSelectedIndex()]);
}
});
cbxCasta.addItem("Guerrero");
cbxCasta.addItem("Hechicero");
cbxCasta.addItem("Asesino");
cbxRaza = new JComboBox<>();
cbxRaza.setBounds(CBXRAZA[0], CBXRAZA[1], CBXRAZA[2], CBXRAZA[TRES]);
layeredPane.add(cbxRaza, new Integer(1));
cbxRaza.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
salud.setText(vecSalud[cbxRaza.getSelectedIndex()]);
energia.setText(vecEnergia[cbxRaza.getSelectedIndex()]);
}
});
cbxRaza.addItem("Humano");
cbxRaza.addItem("Elfo");
cbxRaza.addItem("Orco");
JLabel lblBackground = new JLabel("");
lblBackground.setBounds(LABELBACKGROUND[0], LABELBACKGROUND[1],
LABELBACKGROUND[2], LABELBACKGROUND[TRES]);
layeredPane.add(lblBackground, new Integer(0));
lblBackground.setIcon(new ImageIcon(MenuCreacionPj.class
.getResource("/frames/menuBackground.jpg")));
}
/**
* Crear pj.
*
* @param cliente
* the cliente
* @param personaje
* the personaje
* @param gson
* the gson
* @param vecSalud
* the vec salud
* @param vecEnergia
* the vec energia
* @param vecFuerza
* the vec fuerza
* @param vecDestreza
* the vec destreza
* @param vecInteligencia
* the vec inteligencia
*/
protected void crearPj(final Cliente cliente,
final PaquetePersonaje personaje, final Gson gson,
final String[] vecSalud, final String[] vecEnergia,
final String[] vecFuerza, final String[] vecDestreza,
final String[] vecInteligencia) {
personaje.setNombre(nombre.getText());
if (nombre.getText().equals("")) {
personaje.setNombre("nameless");
}
personaje.setRaza((String) cbxRaza.getSelectedItem());
personaje.setSaludTope(
Integer.parseInt(vecSalud[cbxRaza.getSelectedIndex()]));
personaje.setEnergiaTope(
Integer.parseInt(vecEnergia[cbxRaza.getSelectedIndex()]));
personaje.setCasta((String) cbxCasta.getSelectedItem());
personaje.setFuerza(
Integer.parseInt(vecFuerza[cbxCasta.getSelectedIndex()]));
personaje.setDestreza(
Integer.parseInt(vecDestreza[cbxCasta.getSelectedIndex()]));
personaje.setInteligencia(
Integer.parseInt(vecInteligencia[cbxCasta.getSelectedIndex()]));
try {
            // Send the data to the server
cliente.getPaquetePersonaje().setComando(Comando.CREACIONPJ);
cliente.getSalida()
.writeObject(gson.toJson(cliente.getPaquetePersonaje()));
dispose();
} catch (JsonSyntaxException | IOException esd) {
JOptionPane.showMessageDialog(null, "Error al crear personaje");
}
}
}
| src/main/java/frames/MenuCreacionPj.java | package frames;
import java.awt.Color;
import java.awt.Font;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.IOException;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JLayeredPane;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.WindowConstants;
import javax.swing.border.EmptyBorder;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import cliente.Cliente;
import mensajeria.Comando;
import mensajeria.PaquetePersonaje;
/**
* The Class MenuCreacionPj.
*/
public class MenuCreacionPj extends JFrame {
/**
* The content pane.
*/
private JPanel contentPane;
/**
* The nombre.
*/
private JTextField nombre;
/**
* The destreza.
*/
private JLabel destreza;
/**
* The fuerza.
*/
private JLabel fuerza;
/**
* The inteligencia.
*/
private JLabel inteligencia;
/**
* The salud.
*/
private JLabel salud;
/**
* The energia.
*/
private JLabel energia;
/**
* The cbx casta.
*/
private JComboBox<String> cbxCasta;
/**
* The cbx raza.
*/
private JComboBox<String> cbxRaza;
/**
* Instantiates a new menu creacion pj.
*
* @param cliente
* the cliente
* @param personaje
* the personaje
* @param gson
* the gson
*/
public MenuCreacionPj(final Cliente cliente,
final PaquetePersonaje personaje, final Gson gson) {
setIconImage(Toolkit.getDefaultToolkit()
.getImage("src/main/java/frames/IconoWome.png"));
setCursor(Toolkit.getDefaultToolkit().createCustomCursor(
new ImageIcon(MenuJugar.class.getResource("/cursor.png"))
.getImage(),
new Point(0, 0), "custom cursor"));
final String[] vecSalud = {"55", "50", "60" };
final String[] vecEnergia = {"55", "60", "50" };
final String[] vecFuerza = {"15", "10", "10" };
final String[] vecDestreza = {"10", "10", "15" };
final String[] vecInteligencia = {"10", "15", "10" };
        // When the window is closed
addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(final WindowEvent e) {
personaje.setNombre(nombre.getText());
if (nombre.getText().equals("")) {
personaje.setNombre("nameless");
}
personaje.setRaza((String) cbxRaza.getSelectedItem());
personaje.setSaludTope(
Integer.parseInt(vecSalud[cbxRaza.getSelectedIndex()]));
personaje.setEnergiaTope(Integer
.parseInt(vecEnergia[cbxRaza.getSelectedIndex()]));
personaje.setCasta((String) cbxCasta.getSelectedItem());
personaje.setFuerza(Integer
.parseInt(vecFuerza[cbxCasta.getSelectedIndex()]));
personaje.setDestreza(Integer
.parseInt(vecDestreza[cbxCasta.getSelectedIndex()]));
personaje.setInteligencia(Integer.parseInt(
vecInteligencia[cbxCasta.getSelectedIndex()]));
synchronized (cliente) {
cliente.notify();
}
dispose();
}
});
setTitle("WOME - Crear personaje");
setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
setResizable(false);
setBounds(100, 100, 450, 300);
contentPane = new JPanel();
contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
setContentPane(contentPane);
contentPane.setLayout(null);
setLocationRelativeTo(null);
JLayeredPane layeredPane = new JLayeredPane();
layeredPane.setBounds(0, 0, 444, 271);
contentPane.add(layeredPane);
JLabel lblNewLabel5 = new JLabel("Fuerza");
lblNewLabel5.setBounds(33, 100, 46, 14);
layeredPane.add(lblNewLabel5, new Integer(1));
lblNewLabel5.setForeground(Color.WHITE);
lblNewLabel5.setFont(new Font("Tahoma", Font.PLAIN, 13));
fuerza = new JLabel("15");
fuerza.setBounds(110, 102, 22, 14);
layeredPane.add(fuerza, new Integer(1));
fuerza.setForeground(Color.GREEN);
JLabel lblDestreza = new JLabel("Destreza");
lblDestreza.setBounds(33, 126, 60, 14);
layeredPane.add(lblDestreza, new Integer(1));
lblDestreza.setForeground(Color.WHITE);
lblDestreza.setFont(new Font("Tahoma", Font.PLAIN, 13));
destreza = new JLabel("10");
destreza.setBounds(110, 127, 22, 14);
layeredPane.add(destreza, new Integer(1));
destreza.setForeground(Color.GREEN);
JLabel lblInteligencia = new JLabel("Inteligencia");
lblInteligencia.setBounds(33, 151, 66, 22);
layeredPane.add(lblInteligencia, new Integer(1));
lblInteligencia.setFont(new Font("Tahoma", Font.PLAIN, 13));
lblInteligencia.setForeground(Color.WHITE);
inteligencia = new JLabel("10");
inteligencia.setBounds(110, 156, 22, 14);
layeredPane.add(inteligencia, new Integer(1));
inteligencia.setForeground(Color.GREEN);
JLabel lblSalud = new JLabel("Salud");
lblSalud.setBounds(33, 183, 46, 14);
layeredPane.add(lblSalud, new Integer(1));
lblSalud.setFont(new Font("Tahoma", Font.PLAIN, 13));
lblSalud.setForeground(Color.WHITE);
salud = new JLabel("55");
salud.setBounds(110, 183, 22, 14);
layeredPane.add(salud, new Integer(1));
salud.setForeground(Color.GREEN);
JLabel lblEnergia = new JLabel("Energia");
lblEnergia.setBounds(33, 204, 46, 20);
layeredPane.add(lblEnergia, new Integer(1));
lblEnergia.setForeground(Color.WHITE);
lblEnergia.setFont(new Font("Tahoma", Font.PLAIN, 13));
energia = new JLabel("55");
energia.setBounds(110, 208, 22, 14);
layeredPane.add(energia, new Integer(1));
energia.setForeground(Color.GREEN);
JLabel lblNewLabel4 = new JLabel("Nombre");
lblNewLabel4.setBounds(207, 125, 60, 14);
layeredPane.add(lblNewLabel4, new Integer(1));
lblNewLabel4.setForeground(Color.WHITE);
lblNewLabel4.setFont(new Font("Tahoma", Font.PLAIN, 15));
nombre = new JTextField();
nombre.addActionListener(new ActionListener() {
public void actionPerformed(final ActionEvent arg0) {
crearPj(cliente, personaje, gson, vecSalud, vecEnergia,
vecFuerza, vecDestreza, vecInteligencia);
}
});
nombre.setBounds(277, 122, 122, 20);
layeredPane.add(nombre, new Integer(1));
nombre.setColumns(10);
JLabel lblAceptar = new JLabel("Aceptar");
lblAceptar.setBounds(280, 173, 50, 24);
layeredPane.add(lblAceptar, new Integer(2));
lblAceptar.setForeground(Color.WHITE);
lblAceptar.setFont(new Font("Tahoma", Font.PLAIN, 15));
        // When the accept button is pressed
JButton btnAceptar = new JButton("Aceptar");
btnAceptar.setBounds(230, 174, 153, 23);
layeredPane.add(btnAceptar, new Integer(1));
btnAceptar.setFocusable(false);
btnAceptar.setIcon(new ImageIcon(
MenuCreacionPj.class.getResource("/frames/BotonMenu.png")));
btnAceptar.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
crearPj(cliente, personaje, gson, vecSalud, vecEnergia,
vecFuerza, vecDestreza, vecInteligencia);
}
});
JLabel lblNewLabel = new JLabel("Raza");
lblNewLabel.setBounds(33, 23, 46, 14);
layeredPane.add(lblNewLabel, new Integer(1));
lblNewLabel.setForeground(Color.WHITE);
lblNewLabel.setFont(new Font("Tahoma", Font.PLAIN, 15));
JLabel lblCasta = new JLabel("Casta");
lblCasta.setBounds(161, 23, 46, 14);
layeredPane.add(lblCasta, new Integer(1));
lblCasta.setForeground(Color.WHITE);
lblCasta.setFont(new Font("Tahoma", Font.PLAIN, 15));
cbxCasta = new JComboBox<>();
cbxCasta.setBounds(161, 48, 76, 20);
layeredPane.add(cbxCasta, new Integer(1));
cbxCasta.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
fuerza.setText(vecFuerza[cbxCasta.getSelectedIndex()]);
destreza.setText(vecDestreza[cbxCasta.getSelectedIndex()]);
inteligencia
.setText(vecInteligencia[cbxCasta.getSelectedIndex()]);
}
});
cbxCasta.addItem("Guerrero");
cbxCasta.addItem("Hechicero");
cbxCasta.addItem("Asesino");
cbxRaza = new JComboBox<>();
cbxRaza.setBounds(32, 48, 76, 20);
layeredPane.add(cbxRaza, new Integer(1));
cbxRaza.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
salud.setText(vecSalud[cbxRaza.getSelectedIndex()]);
energia.setText(vecEnergia[cbxRaza.getSelectedIndex()]);
}
});
cbxRaza.addItem("Humano");
cbxRaza.addItem("Elfo");
cbxRaza.addItem("Orco");
JLabel lblBackground = new JLabel("");
lblBackground.setBounds(0, 0, 444, 271);
layeredPane.add(lblBackground, new Integer(0));
lblBackground.setIcon(new ImageIcon(MenuCreacionPj.class
.getResource("/frames/menuBackground.jpg")));
}
/**
* Crear pj.
*
* @param cliente
* the cliente
* @param personaje
* the personaje
* @param gson
* the gson
* @param vecSalud
* the vec salud
* @param vecEnergia
* the vec energia
* @param vecFuerza
* the vec fuerza
* @param vecDestreza
* the vec destreza
* @param vecInteligencia
* the vec inteligencia
*/
protected void crearPj(final Cliente cliente,
final PaquetePersonaje personaje, final Gson gson,
final String[] vecSalud, final String[] vecEnergia,
final String[] vecFuerza, final String[] vecDestreza,
final String[] vecInteligencia) {
personaje.setNombre(nombre.getText());
if (nombre.getText().equals("")) {
personaje.setNombre("nameless");
}
personaje.setRaza((String) cbxRaza.getSelectedItem());
personaje.setSaludTope(
Integer.parseInt(vecSalud[cbxRaza.getSelectedIndex()]));
personaje.setEnergiaTope(
Integer.parseInt(vecEnergia[cbxRaza.getSelectedIndex()]));
personaje.setCasta((String) cbxCasta.getSelectedItem());
personaje.setFuerza(
Integer.parseInt(vecFuerza[cbxCasta.getSelectedIndex()]));
personaje.setDestreza(
Integer.parseInt(vecDestreza[cbxCasta.getSelectedIndex()]));
personaje.setInteligencia(
Integer.parseInt(vecInteligencia[cbxCasta.getSelectedIndex()]));
try {
            // Send the data to the server
cliente.getPaquetePersonaje().setComando(Comando.CREACIONPJ);
cliente.getSalida()
.writeObject(gson.toJson(cliente.getPaquetePersonaje()));
dispose();
} catch (JsonSyntaxException | IOException esd) {
JOptionPane.showMessageDialog(null, "Error al crear personaje");
}
}
}
 | Final checkstyle fixes
Client checkstyle fixes are completed | src/main/java/frames/MenuCreacionPj.java | Final checkstyle fixes |
|
Java | apache-2.0 | a07785b3d49155d60c22b27dff5186429ce7915f | 0 | shevek/spring-rich-client,shevek/spring-rich-client | /*
* Copyright 2002-2004 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.springframework.binding;
import java.util.Map;
/**
* Simple interface for accessing metadata about a particular property.
*
* @author Keith Donald
*/
public interface PropertyMetadataAccessStrategy {
/**
* Determine if the given property is readable.
*
* @param propertyName property to examine.
* @return <code>true</code> if the property is readable.
*/
boolean isReadable(String propertyName);
/**
* Determine if the given property is writeable.
*
* @param propertyName property to examine.
* @return <code>true</code> if the property is writeable.
*/
boolean isWriteable(String propertyName);
/**
* Get the type of the given property.
*
* @param propertyName property to examine.
* @return the type of the property.
*/
Class getPropertyType(String propertyName);
/**
* Returns custom metadata that may be associated with the specified
* property path.
*
* @param propertyName property to examine.
 * @param key used to retrieve the metadata.
* @return Object stored under the given key.
*/
Object getUserMetadata(String propertyName, String key);
/**
* Returns all custom metadata associated with the specified property in the
* form of a Map.
*
* @param propertyName property to examine.
* @return Map containing String keys - this method may or may not return
* <code>null</code> if there is no custom metadata associated with the
* property.
*/
Map getAllUserMetadata(String propertyName);
} | binding/src/main/java/org/springframework/binding/PropertyMetadataAccessStrategy.java | /*
* Copyright 2002-2004 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.springframework.binding;
import java.util.Map;
/**
* Simple interface for accessing metadata about a particular property.
*
* EXPERIMENTAL - not yet fit for general use
* @author Keith Donald
*/
public interface PropertyMetadataAccessStrategy {
public boolean isReadable(String propertyName);
public boolean isWriteable(String propertyName);
public Class getPropertyType(String propertyName);
/**
* Returns custom metadata that may be associated with the specified
* property path.
*/
Object getUserMetadata(String propertyName, String key);
/**
* Returns all custom metadata associated with the specified property
* in the form of a Map.
*
* @return Map containing String keys - this method may or may not return
* <code>null</code> if there is no custom metadata associated
* with the property.
*/
Map getAllUserMetadata(String propertyName);
} | Added javadoc
git-svn-id: 789609e278efc0cd74c84a9bb7abaca0738de801@1786 817809c7-9d0e-0410-b92d-a7ac8b8adc13
| binding/src/main/java/org/springframework/binding/PropertyMetadataAccessStrategy.java | Added javadoc |
|
Java | apache-2.0 | 6fd585a275a95dbabbad60bf31e1ffff6eddfa92 | 0 | SpineEventEngine/base,SpineEventEngine/base,SpineEventEngine/base | /*
* Copyright 2019, TeamDev. All rights reserved.
*
* Redistribution and use in source and/or binary forms, with or without
* modification, must retain the above copyright notice and the following
* disclaimer.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.spine.js.generate.index;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.Descriptors;
import io.spine.code.js.Directory;
import io.spine.code.js.FileName;
import io.spine.code.proto.FileSet;
import io.spine.code.proto.TypeSet;
import io.spine.js.generate.GenerationTask;
import io.spine.js.generate.output.CodeLines;
import io.spine.js.generate.output.FileWriter;
import io.spine.js.generate.output.snippet.Import;
import java.util.Collection;
import java.util.Set;
import static io.spine.code.js.LibraryFile.INDEX;
import static io.spine.js.generate.output.CodeLine.emptyLine;
import static java.util.stream.Collectors.toSet;
/**
* The task to generate the {@code index.js} for generated Protobuf types.
*
* <p>The index file is used by the Spine Web to register known types and their parsers.
*
* <p>The index file provides:
* <ul>
* <li>The map of known types.
* <li>The map of parsers for known types.
* </ul>
*/
public final class GenerateIndexFile extends GenerationTask {
public GenerateIndexFile(Directory generatedRoot) {
super(generatedRoot);
}
@Override
protected void generateFor(FileSet fileSet) {
CodeLines code = codeFor(fileSet);
FileWriter writer = FileWriter.createFor(generatedRoot(), INDEX);
writer.write(code);
}
@VisibleForTesting
static CodeLines codeFor(FileSet fileSet) {
CodeLines lines = new CodeLines();
lines.append(knownTypesImports(fileSet));
lines.append(emptyLine());
lines.append(new KnownTypesMap(fileSet));
lines.append(emptyLine());
lines.append(new TypeParsersMap(fileSet));
return lines;
}
/**
* Generates import statements for all files declaring generated messages.
*/
private static CodeLines knownTypesImports(FileSet fileSet) {
Collection<Descriptors.FileDescriptor> files = fileSet.files();
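        // Only import files that declare messages or enums; files with only services are skipped.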
Set<FileName> imports = files.stream()
.filter(file -> !TypeSet.from(file)
.messagesAndEnums()
.isEmpty())
.map(FileName::from)
.collect(toSet());
CodeLines importLines = new CodeLines();
for (FileName fileName : imports) {
Import fileImport = Import.fileRelativeToRoot(fileName);
importLines.append(fileImport);
}
return importLines;
}
}
| tools/proto-js-plugin/src/main/java/io/spine/js/generate/index/GenerateIndexFile.java | /*
* Copyright 2019, TeamDev. All rights reserved.
*
* Redistribution and use in source and/or binary forms, with or without
* modification, must retain the above copyright notice and the following
* disclaimer.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.spine.js.generate.index;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.Descriptors;
import io.spine.code.js.Directory;
import io.spine.code.js.FileName;
import io.spine.code.proto.FileSet;
import io.spine.code.proto.TypeSet;
import io.spine.js.generate.GenerationTask;
import io.spine.js.generate.output.CodeLines;
import io.spine.js.generate.output.FileWriter;
import io.spine.js.generate.output.snippet.Import;
import java.util.Collection;
import java.util.Set;
import static io.spine.code.js.LibraryFile.INDEX;
import static io.spine.js.generate.output.CodeLine.emptyLine;
import static java.util.stream.Collectors.toSet;
/**
* The task to generate the {@code index.js} for generated Protobuf types.
*
* <p>The index file is used by the Spine Web to register known types and their parsers.
*
* <p>The index file provides:
* <ul>
* <li>The map of known types.
* <li>The map of parsers for known types.
* </ul>
*/
public final class GenerateIndexFile extends GenerationTask {
public GenerateIndexFile(Directory generatedRoot) {
super(generatedRoot);
}
@Override
protected void generateFor(FileSet fileSet) {
CodeLines code = codeFor(fileSet);
FileWriter writer = FileWriter.createFor(generatedRoot(), INDEX);
writer.write(code);
}
@VisibleForTesting
static CodeLines codeFor(FileSet fileSet) {
CodeLines lines = new CodeLines();
lines.append(knownTypesImports(fileSet));
lines.append(emptyLine());
lines.append(new KnownTypesMap(fileSet));
lines.append(emptyLine());
lines.append(new TypeParsersMap(fileSet));
return lines;
}
/**
* Generates import statements for all files declaring generated messages.
*/
private static CodeLines knownTypesImports(FileSet fileSet) {
Collection<Descriptors.FileDescriptor> files = fileSet.files();
Set<FileName> imports = files.stream()
.filter(file -> !TypeSet.from(file)
.isEmpty())
.map(FileName::from)
.collect(toSet());
CodeLines importLines = new CodeLines();
for (FileName fileName : imports) {
Import fileImport = Import.fileRelativeToRoot(fileName);
importLines.append(fileImport);
}
return importLines;
}
}
| Fix known type imports
Do not import files that contain only services which are not listed
among known types anymore.
| tools/proto-js-plugin/src/main/java/io/spine/js/generate/index/GenerateIndexFile.java | Fix known type imports |